fixed subscription table
This commit is contained in:
parent
a1ab31acfe
commit
ef5f57e678
247
.venv/bin/Activate.ps1
Normal file
247
.venv/bin/Activate.ps1
Normal file
@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
70
.venv/bin/activate
Normal file
70
.venv/bin/activate
Normal file
@ -0,0 +1,70 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# You cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# Call hash to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
hash -r 2> /dev/null
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
# on Windows, a path can contain colons and backslashes and has to be converted:
|
||||
if [ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ] ; then
|
||||
# transform D:\path\to\venv to /d/path/to/venv on MSYS
|
||||
# and to /cygdrive/d/path/to/venv on Cygwin
|
||||
export VIRTUAL_ENV=$(cygpath "/Users/user/dev/testpks/testpks/.venv")
|
||||
else
|
||||
# use the path as-is
|
||||
export VIRTUAL_ENV="/Users/user/dev/testpks/testpks/.venv"
|
||||
fi
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1="(.venv) ${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT="(.venv) "
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# Call hash to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
hash -r 2> /dev/null
|
27
.venv/bin/activate.csh
Normal file
27
.venv/bin/activate.csh
Normal file
@ -0,0 +1,27 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV "/Users/user/dev/testpks/testpks/.venv"
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = "(.venv) $prompt"
|
||||
setenv VIRTUAL_ENV_PROMPT "(.venv) "
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
69
.venv/bin/activate.fish
Normal file
69
.venv/bin/activate.fish
Normal file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/). You cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV "/Users/user/dev/testpks/testpks/.venv"
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) "(.venv) " (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT "(.venv) "
|
||||
end
|
8
.venv/bin/ipython
Executable file
8
.venv/bin/ipython
Executable file
@ -0,0 +1,8 @@
|
||||
#!/Users/user/dev/testpks/testpks/.venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from IPython import start_ipython
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(start_ipython())
|
8
.venv/bin/ipython3
Executable file
8
.venv/bin/ipython3
Executable file
@ -0,0 +1,8 @@
|
||||
#!/Users/user/dev/testpks/testpks/.venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from IPython import start_ipython
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(start_ipython())
|
8
.venv/bin/markdown-it
Executable file
8
.venv/bin/markdown-it
Executable file
@ -0,0 +1,8 @@
|
||||
#!/Users/user/dev/testpks/testpks/.venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from markdown_it.cli.parse import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
.venv/bin/pip
Executable file
8
.venv/bin/pip
Executable file
@ -0,0 +1,8 @@
|
||||
#!/Users/user/dev/testpks/testpks/.venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
.venv/bin/pip3
Executable file
8
.venv/bin/pip3
Executable file
@ -0,0 +1,8 @@
|
||||
#!/Users/user/dev/testpks/testpks/.venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
.venv/bin/pip3.12
Executable file
8
.venv/bin/pip3.12
Executable file
@ -0,0 +1,8 @@
|
||||
#!/Users/user/dev/testpks/testpks/.venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
8
.venv/bin/pygmentize
Executable file
8
.venv/bin/pygmentize
Executable file
@ -0,0 +1,8 @@
|
||||
#!/Users/user/dev/testpks/testpks/.venv/bin/python3.12
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pygments.cmdline import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
1
.venv/bin/python
Symbolic link
1
.venv/bin/python
Symbolic link
@ -0,0 +1 @@
|
||||
python3.12
|
1
.venv/bin/python3
Symbolic link
1
.venv/bin/python3
Symbolic link
@ -0,0 +1 @@
|
||||
python3.12
|
1
.venv/bin/python3.12
Symbolic link
1
.venv/bin/python3.12
Symbolic link
@ -0,0 +1 @@
|
||||
/Library/Frameworks/Python.framework/Versions/3.12/bin/python3.12
|
BIN
.venv/bin/uwsgi
Executable file
BIN
.venv/bin/uwsgi
Executable file
Binary file not shown.
163
.venv/lib/python3.12/site-packages/IPython/__init__.py
Normal file
163
.venv/lib/python3.12/site-packages/IPython/__init__.py
Normal file
@ -0,0 +1,163 @@
|
||||
# PYTHON_ARGCOMPLETE_OK
|
||||
"""
|
||||
IPython: tools for interactive and parallel computing in Python.
|
||||
|
||||
https://ipython.org
|
||||
"""
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2008-2011, IPython Development Team.
|
||||
# Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu>
|
||||
# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
|
||||
# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
|
||||
#
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import sys
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Setup everything
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# Don't forget to also update setup.py when this changes!
|
||||
if sys.version_info < (3, 10):
|
||||
raise ImportError(
|
||||
"""
|
||||
IPython 8.19+ supports Python 3.10 and above, following SPEC0.
|
||||
IPython 8.13+ supports Python 3.9 and above, following NEP 29.
|
||||
IPython 8.0-8.12 supports Python 3.8 and above, following NEP 29.
|
||||
When using Python 2.7, please install IPython 5.x LTS Long Term Support version.
|
||||
Python 3.3 and 3.4 were supported up to IPython 6.x.
|
||||
Python 3.5 was supported with IPython 7.0 to 7.9.
|
||||
Python 3.6 was supported with IPython up to 7.16.
|
||||
Python 3.7 was still supported with the 7.x branch.
|
||||
|
||||
See IPython `README.rst` file for more information:
|
||||
|
||||
https://github.com/ipython/ipython/blob/main/README.rst
|
||||
|
||||
"""
|
||||
)
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Setup the top level names
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from .core.getipython import get_ipython
|
||||
from .core import release
|
||||
from .core.application import Application
|
||||
from .terminal.embed import embed
|
||||
|
||||
from .core.interactiveshell import InteractiveShell
|
||||
from .utils.sysinfo import sys_info
|
||||
from .utils.frame import extract_module_locals
|
||||
|
||||
__all__ = ["start_ipython", "embed", "start_kernel", "embed_kernel"]
|
||||
|
||||
# Release data
|
||||
__author__ = '%s <%s>' % (release.author, release.author_email)
|
||||
__license__ = release.license
|
||||
__version__ = release.version
|
||||
version_info = release.version_info
|
||||
# list of CVEs that should have been patched in this release.
|
||||
# this is informational and should not be relied upon.
|
||||
__patched_cves__ = {"CVE-2022-21699", "CVE-2023-24816"}
|
||||
|
||||
|
||||
def embed_kernel(module=None, local_ns=None, **kwargs):
|
||||
"""Embed and start an IPython kernel in a given scope.
|
||||
|
||||
If you don't want the kernel to initialize the namespace
|
||||
from the scope of the surrounding function,
|
||||
and/or you want to load full IPython configuration,
|
||||
you probably want `IPython.start_kernel()` instead.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
module : types.ModuleType, optional
|
||||
The module to load into IPython globals (default: caller)
|
||||
local_ns : dict, optional
|
||||
The namespace to load into IPython user namespace (default: caller)
|
||||
**kwargs : various, optional
|
||||
Further keyword args are relayed to the IPKernelApp constructor,
|
||||
such as `config`, a traitlets :class:`Config` object (see :ref:`configure_start_ipython`),
|
||||
allowing configuration of the kernel (see :ref:`kernel_options`). Will only have an effect
|
||||
on the first embed_kernel call for a given process.
|
||||
"""
|
||||
|
||||
(caller_module, caller_locals) = extract_module_locals(1)
|
||||
if module is None:
|
||||
module = caller_module
|
||||
if local_ns is None:
|
||||
local_ns = caller_locals
|
||||
|
||||
# Only import .zmq when we really need it
|
||||
from ipykernel.embed import embed_kernel as real_embed_kernel
|
||||
real_embed_kernel(module=module, local_ns=local_ns, **kwargs)
|
||||
|
||||
def start_ipython(argv=None, **kwargs):
|
||||
"""Launch a normal IPython instance (as opposed to embedded)
|
||||
|
||||
`IPython.embed()` puts a shell in a particular calling scope,
|
||||
such as a function or method for debugging purposes,
|
||||
which is often not desirable.
|
||||
|
||||
`start_ipython()` does full, regular IPython initialization,
|
||||
including loading startup files, configuration, etc.
|
||||
much of which is skipped by `embed()`.
|
||||
|
||||
This is a public API method, and will survive implementation changes.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
argv : list or None, optional
|
||||
If unspecified or None, IPython will parse command-line options from sys.argv.
|
||||
To prevent any command-line parsing, pass an empty list: `argv=[]`.
|
||||
user_ns : dict, optional
|
||||
specify this dictionary to initialize the IPython user namespace with particular values.
|
||||
**kwargs : various, optional
|
||||
Any other kwargs will be passed to the Application constructor,
|
||||
such as `config`, a traitlets :class:`Config` object (see :ref:`configure_start_ipython`),
|
||||
allowing configuration of the instance (see :ref:`terminal_options`).
|
||||
"""
|
||||
from IPython.terminal.ipapp import launch_new_instance
|
||||
return launch_new_instance(argv=argv, **kwargs)
|
||||
|
||||
def start_kernel(argv=None, **kwargs):
|
||||
"""Launch a normal IPython kernel instance (as opposed to embedded)
|
||||
|
||||
`IPython.embed_kernel()` puts a shell in a particular calling scope,
|
||||
such as a function or method for debugging purposes,
|
||||
which is often not desirable.
|
||||
|
||||
`start_kernel()` does full, regular IPython initialization,
|
||||
including loading startup files, configuration, etc.
|
||||
much of which is skipped by `embed_kernel()`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
argv : list or None, optional
|
||||
If unspecified or None, IPython will parse command-line options from sys.argv.
|
||||
To prevent any command-line parsing, pass an empty list: `argv=[]`.
|
||||
user_ns : dict, optional
|
||||
specify this dictionary to initialize the IPython user namespace with particular values.
|
||||
**kwargs : various, optional
|
||||
Any other kwargs will be passed to the Application constructor,
|
||||
such as `config`, a traitlets :class:`Config` object (see :ref:`configure_start_ipython`),
|
||||
allowing configuration of the kernel (see :ref:`kernel_options`).
|
||||
"""
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"start_kernel is deprecated since IPython 8.0, use from `ipykernel.kernelapp.launch_new_instance`",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
from ipykernel.kernelapp import launch_new_instance
|
||||
return launch_new_instance(argv=argv, **kwargs)
|
15
.venv/lib/python3.12/site-packages/IPython/__main__.py
Normal file
15
.venv/lib/python3.12/site-packages/IPython/__main__.py
Normal file
@ -0,0 +1,15 @@
|
||||
# PYTHON_ARGCOMPLETE_OK
|
||||
# encoding: utf-8
|
||||
"""Terminal-based IPython entry point.
|
||||
"""
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2012, IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
from IPython import start_ipython
|
||||
|
||||
start_ipython()
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
87
.venv/lib/python3.12/site-packages/IPython/conftest.py
Normal file
87
.venv/lib/python3.12/site-packages/IPython/conftest.py
Normal file
@ -0,0 +1,87 @@
|
||||
import builtins
|
||||
import inspect
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import sys
|
||||
import types
|
||||
|
||||
import pytest
|
||||
|
||||
# Must register before it gets imported
|
||||
pytest.register_assert_rewrite("IPython.testing.tools")
|
||||
|
||||
from .testing import tools
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(items):
|
||||
"""This function is automatically run by pytest passing all collected test
|
||||
functions.
|
||||
|
||||
We use it to add asyncio marker to all async tests and assert we don't use
|
||||
test functions that are async generators which wouldn't make sense.
|
||||
"""
|
||||
for item in items:
|
||||
if inspect.iscoroutinefunction(item.obj):
|
||||
item.add_marker("asyncio")
|
||||
assert not inspect.isasyncgenfunction(item.obj)
|
||||
|
||||
|
||||
def get_ipython():
|
||||
from .terminal.interactiveshell import TerminalInteractiveShell
|
||||
if TerminalInteractiveShell._instance:
|
||||
return TerminalInteractiveShell.instance()
|
||||
|
||||
config = tools.default_config()
|
||||
config.TerminalInteractiveShell.simple_prompt = True
|
||||
|
||||
# Create and initialize our test-friendly IPython instance.
|
||||
shell = TerminalInteractiveShell.instance(config=config)
|
||||
return shell
|
||||
|
||||
|
||||
@pytest.fixture(scope='session', autouse=True)
|
||||
def work_path():
|
||||
path = pathlib.Path("./tmp-ipython-pytest-profiledir")
|
||||
os.environ["IPYTHONDIR"] = str(path.absolute())
|
||||
if path.exists():
|
||||
raise ValueError('IPython dir temporary path already exists ! Did previous test run exit successfully ?')
|
||||
path.mkdir()
|
||||
yield
|
||||
shutil.rmtree(str(path.resolve()))
|
||||
|
||||
|
||||
def nopage(strng, start=0, screen_lines=0, pager_cmd=None):
|
||||
if isinstance(strng, dict):
|
||||
strng = strng.get("text/plain", "")
|
||||
print(strng)
|
||||
|
||||
|
||||
def xsys(self, cmd):
|
||||
"""Replace the default system call with a capturing one for doctest.
|
||||
"""
|
||||
# We use getoutput, but we need to strip it because pexpect captures
|
||||
# the trailing newline differently from commands.getoutput
|
||||
print(self.getoutput(cmd, split=False, depth=1).rstrip(), end="", file=sys.stdout)
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
# for things to work correctly we would need this as a session fixture;
|
||||
# unfortunately this will fail on some test that get executed as _collection_
|
||||
# time (before the fixture run), in particular parametrized test that contain
|
||||
# yields. so for now execute at import time.
|
||||
#@pytest.fixture(autouse=True, scope='session')
|
||||
def inject():
|
||||
|
||||
builtins.get_ipython = get_ipython
|
||||
builtins._ip = get_ipython()
|
||||
builtins.ip = get_ipython()
|
||||
builtins.ip.system = types.MethodType(xsys, ip)
|
||||
builtins.ip.builtin_trap.activate()
|
||||
from .core import page
|
||||
|
||||
page.pager_page = nopage
|
||||
# yield
|
||||
|
||||
|
||||
inject()
|
12
.venv/lib/python3.12/site-packages/IPython/consoleapp.py
Normal file
12
.venv/lib/python3.12/site-packages/IPython/consoleapp.py
Normal file
@ -0,0 +1,12 @@
|
||||
"""
|
||||
Shim to maintain backwards compatibility with old IPython.consoleapp imports.
|
||||
"""
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from warnings import warn
|
||||
|
||||
warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0."
|
||||
"You should import from jupyter_client.consoleapp instead.", stacklevel=2)
|
||||
|
||||
from jupyter_client.consoleapp import *
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
267
.venv/lib/python3.12/site-packages/IPython/core/alias.py
Normal file
267
.venv/lib/python3.12/site-packages/IPython/core/alias.py
Normal file
@ -0,0 +1,267 @@
|
||||
# encoding: utf-8
|
||||
"""
|
||||
System command aliases.
|
||||
|
||||
Authors:
|
||||
|
||||
* Fernando Perez
|
||||
* Brian Granger
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2008-2011 The IPython Development Team
|
||||
#
|
||||
# Distributed under the terms of the BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from traitlets.config.configurable import Configurable
|
||||
from .error import UsageError
|
||||
|
||||
from traitlets import List, Instance
|
||||
from logging import error
|
||||
|
||||
import typing as t
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Utilities
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# This is used as the pattern for calls to split_user_input.
|
||||
shell_line_split = re.compile(r'^(\s*)()(\S+)(.*$)')
|
||||
|
||||
def default_aliases() -> t.List[t.Tuple[str, str]]:
|
||||
"""Return list of shell aliases to auto-define.
|
||||
"""
|
||||
# Note: the aliases defined here should be safe to use on a kernel
|
||||
# regardless of what frontend it is attached to. Frontends that use a
|
||||
# kernel in-process can define additional aliases that will only work in
|
||||
# their case. For example, things like 'less' or 'clear' that manipulate
|
||||
# the terminal should NOT be declared here, as they will only work if the
|
||||
# kernel is running inside a true terminal, and not over the network.
|
||||
|
||||
if os.name == 'posix':
|
||||
default_aliases = [('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
|
||||
('mv', 'mv'), ('rm', 'rm'), ('cp', 'cp'),
|
||||
('cat', 'cat'),
|
||||
]
|
||||
# Useful set of ls aliases. The GNU and BSD options are a little
|
||||
# different, so we make aliases that provide as similar as possible
|
||||
# behavior in ipython, by passing the right flags for each platform
|
||||
if sys.platform.startswith('linux'):
|
||||
ls_aliases = [('ls', 'ls -F --color'),
|
||||
# long ls
|
||||
('ll', 'ls -F -o --color'),
|
||||
# ls normal files only
|
||||
('lf', 'ls -F -o --color %l | grep ^-'),
|
||||
# ls symbolic links
|
||||
('lk', 'ls -F -o --color %l | grep ^l'),
|
||||
# directories or links to directories,
|
||||
('ldir', 'ls -F -o --color %l | grep /$'),
|
||||
# things which are executable
|
||||
('lx', 'ls -F -o --color %l | grep ^-..x'),
|
||||
]
|
||||
elif sys.platform.startswith('openbsd') or sys.platform.startswith('netbsd'):
|
||||
# OpenBSD, NetBSD. The ls implementation on these platforms do not support
|
||||
# the -G switch and lack the ability to use colorized output.
|
||||
ls_aliases = [('ls', 'ls -F'),
|
||||
# long ls
|
||||
('ll', 'ls -F -l'),
|
||||
# ls normal files only
|
||||
('lf', 'ls -F -l %l | grep ^-'),
|
||||
# ls symbolic links
|
||||
('lk', 'ls -F -l %l | grep ^l'),
|
||||
# directories or links to directories,
|
||||
('ldir', 'ls -F -l %l | grep /$'),
|
||||
# things which are executable
|
||||
('lx', 'ls -F -l %l | grep ^-..x'),
|
||||
]
|
||||
else:
|
||||
# BSD, OSX, etc.
|
||||
ls_aliases = [('ls', 'ls -F -G'),
|
||||
# long ls
|
||||
('ll', 'ls -F -l -G'),
|
||||
# ls normal files only
|
||||
('lf', 'ls -F -l -G %l | grep ^-'),
|
||||
# ls symbolic links
|
||||
('lk', 'ls -F -l -G %l | grep ^l'),
|
||||
# directories or links to directories,
|
||||
('ldir', 'ls -F -G -l %l | grep /$'),
|
||||
# things which are executable
|
||||
('lx', 'ls -F -l -G %l | grep ^-..x'),
|
||||
]
|
||||
default_aliases = default_aliases + ls_aliases
|
||||
elif os.name in ['nt', 'dos']:
|
||||
default_aliases = [('ls', 'dir /on'),
|
||||
('ddir', 'dir /ad /on'), ('ldir', 'dir /ad /on'),
|
||||
('mkdir', 'mkdir'), ('rmdir', 'rmdir'),
|
||||
('echo', 'echo'), ('ren', 'ren'), ('copy', 'copy'),
|
||||
]
|
||||
else:
|
||||
default_aliases = []
|
||||
|
||||
return default_aliases
|
||||
|
||||
|
||||
class AliasError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InvalidAliasError(AliasError):
|
||||
pass
|
||||
|
||||
class Alias(object):
|
||||
"""Callable object storing the details of one alias.
|
||||
|
||||
Instances are registered as magic functions to allow use of aliases.
|
||||
"""
|
||||
|
||||
# Prepare blacklist
|
||||
blacklist = {'cd','popd','pushd','dhist','alias','unalias'}
|
||||
|
||||
def __init__(self, shell, name, cmd):
|
||||
self.shell = shell
|
||||
self.name = name
|
||||
self.cmd = cmd
|
||||
self.__doc__ = "Alias for `!{}`".format(cmd)
|
||||
self.nargs = self.validate()
|
||||
|
||||
def validate(self):
|
||||
"""Validate the alias, and return the number of arguments."""
|
||||
if self.name in self.blacklist:
|
||||
raise InvalidAliasError("The name %s can't be aliased "
|
||||
"because it is a keyword or builtin." % self.name)
|
||||
try:
|
||||
caller = self.shell.magics_manager.magics['line'][self.name]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
if not isinstance(caller, Alias):
|
||||
raise InvalidAliasError("The name %s can't be aliased "
|
||||
"because it is another magic command." % self.name)
|
||||
|
||||
if not (isinstance(self.cmd, str)):
|
||||
raise InvalidAliasError("An alias command must be a string, "
|
||||
"got: %r" % self.cmd)
|
||||
|
||||
nargs = self.cmd.count('%s') - self.cmd.count('%%s')
|
||||
|
||||
if (nargs > 0) and (self.cmd.find('%l') >= 0):
|
||||
raise InvalidAliasError('The %s and %l specifiers are mutually '
|
||||
'exclusive in alias definitions.')
|
||||
|
||||
return nargs
|
||||
|
||||
def __repr__(self):
|
||||
return "<alias {} for {!r}>".format(self.name, self.cmd)
|
||||
|
||||
def __call__(self, rest=''):
|
||||
cmd = self.cmd
|
||||
nargs = self.nargs
|
||||
# Expand the %l special to be the user's input line
|
||||
if cmd.find('%l') >= 0:
|
||||
cmd = cmd.replace('%l', rest)
|
||||
rest = ''
|
||||
|
||||
if nargs==0:
|
||||
if cmd.find('%%s') >= 1:
|
||||
cmd = cmd.replace('%%s', '%s')
|
||||
# Simple, argument-less aliases
|
||||
cmd = '%s %s' % (cmd, rest)
|
||||
else:
|
||||
# Handle aliases with positional arguments
|
||||
args = rest.split(None, nargs)
|
||||
if len(args) < nargs:
|
||||
raise UsageError('Alias <%s> requires %s arguments, %s given.' %
|
||||
(self.name, nargs, len(args)))
|
||||
cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:]))
|
||||
|
||||
self.shell.system(cmd)
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Main AliasManager class
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class AliasManager(Configurable):
|
||||
default_aliases: List = List(default_aliases()).tag(config=True)
|
||||
user_aliases: List = List(default_value=[]).tag(config=True)
|
||||
shell = Instance(
|
||||
"IPython.core.interactiveshell.InteractiveShellABC", allow_none=True
|
||||
)
|
||||
|
||||
def __init__(self, shell=None, **kwargs):
|
||||
super(AliasManager, self).__init__(shell=shell, **kwargs)
|
||||
# For convenient access
|
||||
if self.shell is not None:
|
||||
self.linemagics = self.shell.magics_manager.magics["line"]
|
||||
self.init_aliases()
|
||||
|
||||
def init_aliases(self):
|
||||
# Load default & user aliases
|
||||
for name, cmd in self.default_aliases + self.user_aliases:
|
||||
if (
|
||||
cmd.startswith("ls ")
|
||||
and self.shell is not None
|
||||
and self.shell.colors == "NoColor"
|
||||
):
|
||||
cmd = cmd.replace(" --color", "")
|
||||
self.soft_define_alias(name, cmd)
|
||||
|
||||
@property
|
||||
def aliases(self):
|
||||
return [(n, func.cmd) for (n, func) in self.linemagics.items()
|
||||
if isinstance(func, Alias)]
|
||||
|
||||
def soft_define_alias(self, name, cmd):
|
||||
"""Define an alias, but don't raise on an AliasError."""
|
||||
try:
|
||||
self.define_alias(name, cmd)
|
||||
except AliasError as e:
|
||||
error("Invalid alias: %s" % e)
|
||||
|
||||
def define_alias(self, name, cmd):
|
||||
"""Define a new alias after validating it.
|
||||
|
||||
This will raise an :exc:`AliasError` if there are validation
|
||||
problems.
|
||||
"""
|
||||
caller = Alias(shell=self.shell, name=name, cmd=cmd)
|
||||
self.shell.magics_manager.register_function(caller, magic_kind='line',
|
||||
magic_name=name)
|
||||
|
||||
def get_alias(self, name):
|
||||
"""Return an alias, or None if no alias by that name exists."""
|
||||
aname = self.linemagics.get(name, None)
|
||||
return aname if isinstance(aname, Alias) else None
|
||||
|
||||
def is_alias(self, name):
|
||||
"""Return whether or not a given name has been defined as an alias"""
|
||||
return self.get_alias(name) is not None
|
||||
|
||||
def undefine_alias(self, name):
|
||||
if self.is_alias(name):
|
||||
del self.linemagics[name]
|
||||
else:
|
||||
raise ValueError('%s is not an alias' % name)
|
||||
|
||||
def clear_aliases(self):
|
||||
for name, _ in self.aliases:
|
||||
self.undefine_alias(name)
|
||||
|
||||
def retrieve_alias(self, name):
|
||||
"""Retrieve the command to which an alias expands."""
|
||||
caller = self.get_alias(name)
|
||||
if caller:
|
||||
return caller.cmd
|
||||
else:
|
||||
raise ValueError('%s is not an alias' % name)
|
492
.venv/lib/python3.12/site-packages/IPython/core/application.py
Normal file
492
.venv/lib/python3.12/site-packages/IPython/core/application.py
Normal file
@ -0,0 +1,492 @@
|
||||
# encoding: utf-8
|
||||
"""
|
||||
An application for IPython.
|
||||
|
||||
All top-level applications should use the classes in this module for
|
||||
handling configuration and creating configurables.
|
||||
|
||||
The job of an :class:`Application` is to create the master configuration
|
||||
object and then create the configurable objects, passing the config to them.
|
||||
"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import atexit
|
||||
from copy import deepcopy
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from traitlets.config.application import Application, catch_config_error
|
||||
from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader
|
||||
from IPython.core import release, crashhandler
|
||||
from IPython.core.profiledir import ProfileDir, ProfileDirError
|
||||
from IPython.paths import get_ipython_dir, get_ipython_package_dir
|
||||
from IPython.utils.path import ensure_dir_exists
|
||||
from traitlets import (
|
||||
List, Unicode, Type, Bool, Set, Instance, Undefined,
|
||||
default, observe,
|
||||
)
|
||||
|
||||
if os.name == "nt":
|
||||
programdata = os.environ.get("PROGRAMDATA", None)
|
||||
if programdata is not None:
|
||||
SYSTEM_CONFIG_DIRS = [str(Path(programdata) / "ipython")]
|
||||
else: # PROGRAMDATA is not defined by default on XP.
|
||||
SYSTEM_CONFIG_DIRS = []
|
||||
else:
|
||||
SYSTEM_CONFIG_DIRS = [
|
||||
"/usr/local/etc/ipython",
|
||||
"/etc/ipython",
|
||||
]
|
||||
|
||||
|
||||
ENV_CONFIG_DIRS = []
|
||||
_env_config_dir = os.path.join(sys.prefix, 'etc', 'ipython')
|
||||
if _env_config_dir not in SYSTEM_CONFIG_DIRS:
|
||||
# only add ENV_CONFIG if sys.prefix is not already included
|
||||
ENV_CONFIG_DIRS.append(_env_config_dir)
|
||||
|
||||
|
||||
_envvar = os.environ.get('IPYTHON_SUPPRESS_CONFIG_ERRORS')
|
||||
if _envvar in {None, ''}:
|
||||
IPYTHON_SUPPRESS_CONFIG_ERRORS = None
|
||||
else:
|
||||
if _envvar.lower() in {'1','true'}:
|
||||
IPYTHON_SUPPRESS_CONFIG_ERRORS = True
|
||||
elif _envvar.lower() in {'0','false'} :
|
||||
IPYTHON_SUPPRESS_CONFIG_ERRORS = False
|
||||
else:
|
||||
sys.exit("Unsupported value for environment variable: 'IPYTHON_SUPPRESS_CONFIG_ERRORS' is set to '%s' which is none of {'0', '1', 'false', 'true', ''}."% _envvar )
|
||||
|
||||
# aliases and flags
|
||||
|
||||
base_aliases = {}
|
||||
if isinstance(Application.aliases, dict):
|
||||
# traitlets 5
|
||||
base_aliases.update(Application.aliases)
|
||||
base_aliases.update(
|
||||
{
|
||||
"profile-dir": "ProfileDir.location",
|
||||
"profile": "BaseIPythonApplication.profile",
|
||||
"ipython-dir": "BaseIPythonApplication.ipython_dir",
|
||||
"log-level": "Application.log_level",
|
||||
"config": "BaseIPythonApplication.extra_config_file",
|
||||
}
|
||||
)
|
||||
|
||||
base_flags = dict()
|
||||
if isinstance(Application.flags, dict):
|
||||
# traitlets 5
|
||||
base_flags.update(Application.flags)
|
||||
base_flags.update(
|
||||
dict(
|
||||
debug=(
|
||||
{"Application": {"log_level": logging.DEBUG}},
|
||||
"set log level to logging.DEBUG (maximize logging output)",
|
||||
),
|
||||
quiet=(
|
||||
{"Application": {"log_level": logging.CRITICAL}},
|
||||
"set log level to logging.CRITICAL (minimize logging output)",
|
||||
),
|
||||
init=(
|
||||
{
|
||||
"BaseIPythonApplication": {
|
||||
"copy_config_files": True,
|
||||
"auto_create": True,
|
||||
}
|
||||
},
|
||||
"""Initialize profile with default config files. This is equivalent
|
||||
to running `ipython profile create <profile>` prior to startup.
|
||||
""",
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class ProfileAwareConfigLoader(PyFileConfigLoader):
|
||||
"""A Python file config loader that is aware of IPython profiles."""
|
||||
def load_subconfig(self, fname, path=None, profile=None):
|
||||
if profile is not None:
|
||||
try:
|
||||
profile_dir = ProfileDir.find_profile_dir_by_name(
|
||||
get_ipython_dir(),
|
||||
profile,
|
||||
)
|
||||
except ProfileDirError:
|
||||
return
|
||||
path = profile_dir.location
|
||||
return super(ProfileAwareConfigLoader, self).load_subconfig(fname, path=path)
|
||||
|
||||
class BaseIPythonApplication(Application):
|
||||
name = "ipython"
|
||||
description = "IPython: an enhanced interactive Python shell."
|
||||
version = Unicode(release.version)
|
||||
|
||||
aliases = base_aliases
|
||||
flags = base_flags
|
||||
classes = List([ProfileDir])
|
||||
|
||||
# enable `load_subconfig('cfg.py', profile='name')`
|
||||
python_config_loader_class = ProfileAwareConfigLoader
|
||||
|
||||
# Track whether the config_file has changed,
|
||||
# because some logic happens only if we aren't using the default.
|
||||
config_file_specified = Set()
|
||||
|
||||
config_file_name = Unicode()
|
||||
@default('config_file_name')
|
||||
def _config_file_name_default(self):
|
||||
return self.name.replace('-','_') + u'_config.py'
|
||||
@observe('config_file_name')
|
||||
def _config_file_name_changed(self, change):
|
||||
if change['new'] != change['old']:
|
||||
self.config_file_specified.add(change['new'])
|
||||
|
||||
# The directory that contains IPython's builtin profiles.
|
||||
builtin_profile_dir = Unicode(
|
||||
os.path.join(get_ipython_package_dir(), u'config', u'profile', u'default')
|
||||
)
|
||||
|
||||
config_file_paths = List(Unicode())
|
||||
@default('config_file_paths')
|
||||
def _config_file_paths_default(self):
|
||||
return []
|
||||
|
||||
extra_config_file = Unicode(
|
||||
help="""Path to an extra config file to load.
|
||||
|
||||
If specified, load this config file in addition to any other IPython config.
|
||||
""").tag(config=True)
|
||||
@observe('extra_config_file')
|
||||
def _extra_config_file_changed(self, change):
|
||||
old = change['old']
|
||||
new = change['new']
|
||||
try:
|
||||
self.config_files.remove(old)
|
||||
except ValueError:
|
||||
pass
|
||||
self.config_file_specified.add(new)
|
||||
self.config_files.append(new)
|
||||
|
||||
profile = Unicode(u'default',
|
||||
help="""The IPython profile to use."""
|
||||
).tag(config=True)
|
||||
|
||||
@observe('profile')
|
||||
def _profile_changed(self, change):
|
||||
self.builtin_profile_dir = os.path.join(
|
||||
get_ipython_package_dir(), u'config', u'profile', change['new']
|
||||
)
|
||||
|
||||
add_ipython_dir_to_sys_path = Bool(
|
||||
False,
|
||||
"""Should the IPython profile directory be added to sys path ?
|
||||
|
||||
This option was non-existing before IPython 8.0, and ipython_dir was added to
|
||||
sys path to allow import of extensions present there. This was historical
|
||||
baggage from when pip did not exist. This now default to false,
|
||||
but can be set to true for legacy reasons.
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
ipython_dir = Unicode(
|
||||
help="""
|
||||
The name of the IPython directory. This directory is used for logging
|
||||
configuration (through profiles), history storage, etc. The default
|
||||
is usually $HOME/.ipython. This option can also be specified through
|
||||
the environment variable IPYTHONDIR.
|
||||
"""
|
||||
).tag(config=True)
|
||||
@default('ipython_dir')
|
||||
def _ipython_dir_default(self):
|
||||
d = get_ipython_dir()
|
||||
self._ipython_dir_changed({
|
||||
'name': 'ipython_dir',
|
||||
'old': d,
|
||||
'new': d,
|
||||
})
|
||||
return d
|
||||
|
||||
_in_init_profile_dir = False
|
||||
|
||||
profile_dir = Instance(ProfileDir, allow_none=True)
|
||||
|
||||
@default('profile_dir')
|
||||
def _profile_dir_default(self):
|
||||
# avoid recursion
|
||||
if self._in_init_profile_dir:
|
||||
return
|
||||
# profile_dir requested early, force initialization
|
||||
self.init_profile_dir()
|
||||
return self.profile_dir
|
||||
|
||||
overwrite = Bool(False,
|
||||
help="""Whether to overwrite existing config files when copying"""
|
||||
).tag(config=True)
|
||||
|
||||
auto_create = Bool(False,
|
||||
help="""Whether to create profile dir if it doesn't exist"""
|
||||
).tag(config=True)
|
||||
|
||||
config_files = List(Unicode())
|
||||
|
||||
@default('config_files')
|
||||
def _config_files_default(self):
|
||||
return [self.config_file_name]
|
||||
|
||||
copy_config_files = Bool(False,
|
||||
help="""Whether to install the default config files into the profile dir.
|
||||
If a new profile is being created, and IPython contains config files for that
|
||||
profile, then they will be staged into the new directory. Otherwise,
|
||||
default config files will be automatically generated.
|
||||
""").tag(config=True)
|
||||
|
||||
verbose_crash = Bool(False,
|
||||
help="""Create a massive crash report when IPython encounters what may be an
|
||||
internal error. The default is to append a short message to the
|
||||
usual traceback""").tag(config=True)
|
||||
|
||||
# The class to use as the crash handler.
|
||||
crash_handler_class = Type(crashhandler.CrashHandler)
|
||||
|
||||
@catch_config_error
|
||||
def __init__(self, **kwargs):
|
||||
super(BaseIPythonApplication, self).__init__(**kwargs)
|
||||
# ensure current working directory exists
|
||||
try:
|
||||
os.getcwd()
|
||||
except:
|
||||
# exit if cwd doesn't exist
|
||||
self.log.error("Current working directory doesn't exist.")
|
||||
self.exit(1)
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
# Various stages of Application creation
|
||||
#-------------------------------------------------------------------------
|
||||
|
||||
def init_crash_handler(self):
|
||||
"""Create a crash handler, typically setting sys.excepthook to it."""
|
||||
self.crash_handler = self.crash_handler_class(self)
|
||||
sys.excepthook = self.excepthook
|
||||
def unset_crashhandler():
|
||||
sys.excepthook = sys.__excepthook__
|
||||
atexit.register(unset_crashhandler)
|
||||
|
||||
def excepthook(self, etype, evalue, tb):
|
||||
"""this is sys.excepthook after init_crashhandler
|
||||
|
||||
set self.verbose_crash=True to use our full crashhandler, instead of
|
||||
a regular traceback with a short message (crash_handler_lite)
|
||||
"""
|
||||
|
||||
if self.verbose_crash:
|
||||
return self.crash_handler(etype, evalue, tb)
|
||||
else:
|
||||
return crashhandler.crash_handler_lite(etype, evalue, tb)
|
||||
|
||||
@observe('ipython_dir')
|
||||
def _ipython_dir_changed(self, change):
|
||||
old = change['old']
|
||||
new = change['new']
|
||||
if old is not Undefined:
|
||||
str_old = os.path.abspath(old)
|
||||
if str_old in sys.path:
|
||||
sys.path.remove(str_old)
|
||||
if self.add_ipython_dir_to_sys_path:
|
||||
str_path = os.path.abspath(new)
|
||||
sys.path.append(str_path)
|
||||
ensure_dir_exists(new)
|
||||
readme = os.path.join(new, "README")
|
||||
readme_src = os.path.join(
|
||||
get_ipython_package_dir(), "config", "profile", "README"
|
||||
)
|
||||
if not os.path.exists(readme) and os.path.exists(readme_src):
|
||||
shutil.copy(readme_src, readme)
|
||||
for d in ("extensions", "nbextensions"):
|
||||
path = os.path.join(new, d)
|
||||
try:
|
||||
ensure_dir_exists(path)
|
||||
except OSError as e:
|
||||
# this will not be EEXIST
|
||||
self.log.error("couldn't create path %s: %s", path, e)
|
||||
self.log.debug("IPYTHONDIR set to: %s", new)
|
||||
|
||||
def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS):
|
||||
"""Load the config file.
|
||||
|
||||
By default, errors in loading config are handled, and a warning
|
||||
printed on screen. For testing, the suppress_errors option is set
|
||||
to False, so errors will make tests fail.
|
||||
|
||||
`suppress_errors` default value is to be `None` in which case the
|
||||
behavior default to the one of `traitlets.Application`.
|
||||
|
||||
The default value can be set :
|
||||
- to `False` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '0', or 'false' (case insensitive).
|
||||
- to `True` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '1' or 'true' (case insensitive).
|
||||
- to `None` by setting 'IPYTHON_SUPPRESS_CONFIG_ERRORS' environment variable to '' (empty string) or leaving it unset.
|
||||
|
||||
Any other value are invalid, and will make IPython exit with a non-zero return code.
|
||||
"""
|
||||
|
||||
|
||||
self.log.debug("Searching path %s for config files", self.config_file_paths)
|
||||
base_config = 'ipython_config.py'
|
||||
self.log.debug("Attempting to load config file: %s" %
|
||||
base_config)
|
||||
try:
|
||||
if suppress_errors is not None:
|
||||
old_value = Application.raise_config_file_errors
|
||||
Application.raise_config_file_errors = not suppress_errors;
|
||||
Application.load_config_file(
|
||||
self,
|
||||
base_config,
|
||||
path=self.config_file_paths
|
||||
)
|
||||
except ConfigFileNotFound:
|
||||
# ignore errors loading parent
|
||||
self.log.debug("Config file %s not found", base_config)
|
||||
pass
|
||||
if suppress_errors is not None:
|
||||
Application.raise_config_file_errors = old_value
|
||||
|
||||
for config_file_name in self.config_files:
|
||||
if not config_file_name or config_file_name == base_config:
|
||||
continue
|
||||
self.log.debug("Attempting to load config file: %s" %
|
||||
self.config_file_name)
|
||||
try:
|
||||
Application.load_config_file(
|
||||
self,
|
||||
config_file_name,
|
||||
path=self.config_file_paths
|
||||
)
|
||||
except ConfigFileNotFound:
|
||||
# Only warn if the default config file was NOT being used.
|
||||
if config_file_name in self.config_file_specified:
|
||||
msg = self.log.warning
|
||||
else:
|
||||
msg = self.log.debug
|
||||
msg("Config file not found, skipping: %s", config_file_name)
|
||||
except Exception:
|
||||
# For testing purposes.
|
||||
if not suppress_errors:
|
||||
raise
|
||||
self.log.warning("Error loading config file: %s" %
|
||||
self.config_file_name, exc_info=True)
|
||||
|
||||
def init_profile_dir(self):
|
||||
"""initialize the profile dir"""
|
||||
self._in_init_profile_dir = True
|
||||
if self.profile_dir is not None:
|
||||
# already ran
|
||||
return
|
||||
if 'ProfileDir.location' not in self.config:
|
||||
# location not specified, find by profile name
|
||||
try:
|
||||
p = ProfileDir.find_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
|
||||
except ProfileDirError:
|
||||
# not found, maybe create it (always create default profile)
|
||||
if self.auto_create or self.profile == 'default':
|
||||
try:
|
||||
p = ProfileDir.create_profile_dir_by_name(self.ipython_dir, self.profile, self.config)
|
||||
except ProfileDirError:
|
||||
self.log.fatal("Could not create profile: %r"%self.profile)
|
||||
self.exit(1)
|
||||
else:
|
||||
self.log.info("Created profile dir: %r"%p.location)
|
||||
else:
|
||||
self.log.fatal("Profile %r not found."%self.profile)
|
||||
self.exit(1)
|
||||
else:
|
||||
self.log.debug("Using existing profile dir: %r", p.location)
|
||||
else:
|
||||
location = self.config.ProfileDir.location
|
||||
# location is fully specified
|
||||
try:
|
||||
p = ProfileDir.find_profile_dir(location, self.config)
|
||||
except ProfileDirError:
|
||||
# not found, maybe create it
|
||||
if self.auto_create:
|
||||
try:
|
||||
p = ProfileDir.create_profile_dir(location, self.config)
|
||||
except ProfileDirError:
|
||||
self.log.fatal("Could not create profile directory: %r"%location)
|
||||
self.exit(1)
|
||||
else:
|
||||
self.log.debug("Creating new profile dir: %r"%location)
|
||||
else:
|
||||
self.log.fatal("Profile directory %r not found."%location)
|
||||
self.exit(1)
|
||||
else:
|
||||
self.log.debug("Using existing profile dir: %r", p.location)
|
||||
# if profile_dir is specified explicitly, set profile name
|
||||
dir_name = os.path.basename(p.location)
|
||||
if dir_name.startswith('profile_'):
|
||||
self.profile = dir_name[8:]
|
||||
|
||||
self.profile_dir = p
|
||||
self.config_file_paths.append(p.location)
|
||||
self._in_init_profile_dir = False
|
||||
|
||||
def init_config_files(self):
|
||||
"""[optionally] copy default config files into profile dir."""
|
||||
self.config_file_paths.extend(ENV_CONFIG_DIRS)
|
||||
self.config_file_paths.extend(SYSTEM_CONFIG_DIRS)
|
||||
# copy config files
|
||||
path = Path(self.builtin_profile_dir)
|
||||
if self.copy_config_files:
|
||||
src = self.profile
|
||||
|
||||
cfg = self.config_file_name
|
||||
if path and (path / cfg).exists():
|
||||
self.log.warning(
|
||||
"Staging %r from %s into %r [overwrite=%s]"
|
||||
% (cfg, src, self.profile_dir.location, self.overwrite)
|
||||
)
|
||||
self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite)
|
||||
else:
|
||||
self.stage_default_config_file()
|
||||
else:
|
||||
# Still stage *bundled* config files, but not generated ones
|
||||
# This is necessary for `ipython profile=sympy` to load the profile
|
||||
# on the first go
|
||||
files = path.glob("*.py")
|
||||
for fullpath in files:
|
||||
cfg = fullpath.name
|
||||
if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False):
|
||||
# file was copied
|
||||
self.log.warning("Staging bundled %s from %s into %r"%(
|
||||
cfg, self.profile, self.profile_dir.location)
|
||||
)
|
||||
|
||||
|
||||
def stage_default_config_file(self):
|
||||
"""auto generate default config file, and stage it into the profile."""
|
||||
s = self.generate_config_file()
|
||||
config_file = Path(self.profile_dir.location) / self.config_file_name
|
||||
if self.overwrite or not config_file.exists():
|
||||
self.log.warning("Generating default config file: %r", (config_file))
|
||||
config_file.write_text(s, encoding="utf-8")
|
||||
|
||||
@catch_config_error
|
||||
def initialize(self, argv=None):
|
||||
# don't hook up crash handler before parsing command-line
|
||||
self.parse_command_line(argv)
|
||||
self.init_crash_handler()
|
||||
if self.subapp is not None:
|
||||
# stop here if subapp is taking over
|
||||
return
|
||||
# save a copy of CLI config to re-load after config files
|
||||
# so that it has highest priority
|
||||
cl_config = deepcopy(self.config)
|
||||
self.init_profile_dir()
|
||||
self.init_config_files()
|
||||
self.load_config_file()
|
||||
# enforce cl-opts override configfile opts:
|
||||
self.update_config(cl_config)
|
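A minimal sketch (illustrative, using traitlets directly rather than IPython internals) of the precedence rule enforced by initialize() above: the command-line config is captured before any file is read and merged back in last, so it wins on conflicts. The trait names below are placeholders.

from copy import deepcopy
from traitlets.config import Config

cl_config = Config()                          # pretend this came from argv parsing
cl_config.InteractiveShell.colors = "NoColor"

file_config = Config()                        # pretend this came from ipython_config.py
file_config.InteractiveShell.colors = "Linux"
file_config.InteractiveShell.autoindent = False

merged = Config()
merged.merge(file_config)                     # config-file values first
merged.merge(deepcopy(cl_config))             # CLI values re-applied last, so they win
assert merged.InteractiveShell.colors == "NoColor"
assert merged.InteractiveShell.autoindent is False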
155
.venv/lib/python3.12/site-packages/IPython/core/async_helpers.py
Normal file
@ -0,0 +1,155 @@
|
||||
"""
|
||||
Async helper functions that are invalid syntax on Python 3.5 and below.
|
||||
|
||||
This code is best effort, and may have edge cases not behaving as expected. In
|
||||
particular it contains a number of heuristics to detect whether code is
|
||||
effectively async and needs to run in an event loop or not.
|
||||
|
||||
Some constructs (like top-level `return`, or `yield`) are taken care of
|
||||
explicitly to actually raise a SyntaxError and stay as close as possible to
|
||||
Python semantics.
|
||||
"""
|
||||
|
||||
import ast
|
||||
import asyncio
|
||||
import inspect
|
||||
from functools import wraps
|
||||
|
||||
_asyncio_event_loop = None
|
||||
|
||||
|
||||
def get_asyncio_loop():
|
||||
"""asyncio has deprecated get_event_loop
|
||||
|
||||
Replicate it here, with our desired semantics:
|
||||
|
||||
- always returns a valid, not-closed loop
|
||||
- not thread-local like asyncio's,
|
||||
because we only want one loop for IPython
|
||||
- if called from inside a coroutine (e.g. in ipykernel),
|
||||
return the running loop
|
||||
|
||||
.. versionadded:: 8.0
|
||||
"""
|
||||
try:
|
||||
return asyncio.get_running_loop()
|
||||
except RuntimeError:
|
||||
# not inside a coroutine,
|
||||
# track our own global
|
||||
pass
|
||||
|
||||
# not thread-local like asyncio's,
|
||||
# because we only track one event loop to run for IPython itself,
|
||||
# always in the main thread.
|
||||
global _asyncio_event_loop
|
||||
if _asyncio_event_loop is None or _asyncio_event_loop.is_closed():
|
||||
_asyncio_event_loop = asyncio.new_event_loop()
|
||||
return _asyncio_event_loop
|
||||
|
||||
|
||||
class _AsyncIORunner:
|
||||
def __call__(self, coro):
|
||||
"""
|
||||
Handler for asyncio autoawait
|
||||
"""
|
||||
return get_asyncio_loop().run_until_complete(coro)
|
||||
|
||||
def __str__(self):
|
||||
return "asyncio"
|
||||
|
||||
|
||||
_asyncio_runner = _AsyncIORunner()
|
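A short usage sketch (illustrative, relying on the definitions above and not part of the module): this runner is what the plain asyncio autoawait mode uses, and it simply drives a coroutine to completion on the single persistent loop returned by get_asyncio_loop().

import asyncio

async def _demo():
    await asyncio.sleep(0)      # stand-in for real asynchronous work
    return 42

# equivalent to get_asyncio_loop().run_until_complete(_demo())
assert _asyncio_runner(_demo()) == 42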
||||
|
||||
|
||||
class _AsyncIOProxy:
|
||||
"""Proxy-object for an asyncio
|
||||
|
||||
Any coroutine method is wrapped with asyncio.run_coroutine_threadsafe on the proxied event loop.
|
||||
"""
|
||||
|
||||
def __init__(self, obj, event_loop):
|
||||
self._obj = obj
|
||||
self._event_loop = event_loop
|
||||
|
||||
def __repr__(self):
|
||||
return f"<_AsyncIOProxy({self._obj!r})>"
|
||||
|
||||
def __getattr__(self, key):
|
||||
attr = getattr(self._obj, key)
|
||||
if inspect.iscoroutinefunction(attr):
|
||||
# if it's a coroutine method,
|
||||
# return a threadsafe wrapper onto the _current_ asyncio loop
|
||||
@wraps(attr)
|
||||
def _wrapped(*args, **kwargs):
|
||||
concurrent_future = asyncio.run_coroutine_threadsafe(
|
||||
attr(*args, **kwargs), self._event_loop
|
||||
)
|
||||
return asyncio.wrap_future(concurrent_future)
|
||||
|
||||
return _wrapped
|
||||
else:
|
||||
return attr
|
||||
|
||||
def __dir__(self):
|
||||
return dir(self._obj)
|
||||
|
||||
|
||||
def _curio_runner(coroutine):
|
||||
"""
|
||||
handler for curio autoawait
|
||||
"""
|
||||
import curio
|
||||
|
||||
return curio.run(coroutine)
|
||||
|
||||
|
||||
def _trio_runner(async_fn):
|
||||
import trio
|
||||
|
||||
async def loc(coro):
|
||||
"""
|
||||
We need the dummy no-op async def to protect from
|
||||
trio's internals. See https://github.com/python-trio/trio/issues/89
|
||||
"""
|
||||
return await coro
|
||||
|
||||
return trio.run(loc, async_fn)
|
||||
|
||||
|
||||
def _pseudo_sync_runner(coro):
|
||||
"""
|
||||
A runner that does not really allow async execution, and just advances the coroutine.
|
||||
|
||||
See discussion in https://github.com/python-trio/trio/issues/608,
|
||||
|
||||
Credit to Nathaniel Smith
|
||||
"""
|
||||
try:
|
||||
coro.send(None)
|
||||
except StopIteration as exc:
|
||||
return exc.value
|
||||
else:
|
||||
# TODO: do not raise but return an execution result with the right info.
|
||||
raise RuntimeError(
|
||||
"{coro_name!r} needs a real async loop".format(coro_name=coro.__name__)
|
||||
)
|
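For illustration (not part of this module): a coroutine that never truly suspends is exactly the case this pseudo-sync runner can handle, since the first send(None) raises StopIteration carrying the return value.

async def _trivial():
    return "done"               # no await that actually suspends

assert _pseudo_sync_runner(_trivial()) == "done"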
||||
|
||||
|
||||
def _should_be_async(cell: str) -> bool:
|
||||
"""Detect if a block of code need to be wrapped in an `async def`
|
||||
|
||||
Attempt to parse the block of code; if it compiles, we're fine.
|
||||
Otherwise we wrap it and try to compile.
|
||||
|
||||
If it works, assume it should be async. Otherwise return False.
|
||||
|
||||
Not handled yet: if the block of code has a return statement at the top
|
||||
level, it will be seen as async. This is a known limitation.
|
||||
"""
|
||||
try:
|
||||
code = compile(
|
||||
cell, "<>", "exec", flags=getattr(ast, "PyCF_ALLOW_TOP_LEVEL_AWAIT", 0x0)
|
||||
)
|
||||
return inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE
|
||||
except (SyntaxError, MemoryError):
|
||||
return False
|
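Expected behaviour of the detection above, shown as a small illustrative check (not part of the module): a top-level await only compiles because of PyCF_ALLOW_TOP_LEVEL_AWAIT, and the resulting code object then carries the coroutine flag.

assert _should_be_async("await asyncio.sleep(0)") is True    # needs an event loop
assert _should_be_async("x = 1 + 1") is False                 # plain synchronous code
assert _should_be_async("def broken(:") is False              # SyntaxError -> False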
70
.venv/lib/python3.12/site-packages/IPython/core/autocall.py
Normal file
@ -0,0 +1,70 @@
|
||||
# encoding: utf-8
|
||||
"""
|
||||
Autocall capabilities for IPython.core.
|
||||
|
||||
Authors:
|
||||
|
||||
* Brian Granger
|
||||
* Fernando Perez
|
||||
* Thomas Kluyver
|
||||
|
||||
Notes
|
||||
-----
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2008-2011 The IPython Development Team
|
||||
#
|
||||
# Distributed under the terms of the BSD License. The full license is in
|
||||
# the file COPYING, distributed as part of this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Code
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class IPyAutocall(object):
|
||||
""" Instances of this class are always autocalled
|
||||
|
||||
This happens regardless of 'autocall' variable state. Use this to
|
||||
develop macro-like mechanisms.
|
||||
"""
|
||||
_ip = None
|
||||
rewrite = True
|
||||
def __init__(self, ip=None):
|
||||
self._ip = ip
|
||||
|
||||
def set_ip(self, ip):
|
||||
"""Will be used to set _ip point to current ipython instance b/f call
|
||||
|
||||
Override this method if you don't want this to happen.
|
||||
|
||||
"""
|
||||
self._ip = ip
|
||||
|
||||
|
||||
class ExitAutocall(IPyAutocall):
|
||||
"""An autocallable object which will be added to the user namespace so that
|
||||
exit, exit(), quit or quit() are all valid ways to close the shell."""
|
||||
rewrite = False
|
||||
|
||||
def __call__(self):
|
||||
self._ip.ask_exit()
|
||||
|
||||
class ZMQExitAutocall(ExitAutocall):
|
||||
"""Exit IPython. Autocallable, so it needn't be explicitly called.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
keep_kernel : bool
|
||||
If True, leave the kernel alive. Otherwise, tell the kernel to exit too
|
||||
(default).
|
||||
"""
|
||||
def __call__(self, keep_kernel=False):
|
||||
self._ip.keepkernel_on_exit = keep_kernel
|
||||
self._ip.ask_exit()
|
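A sketch (illustrative, not part of this file) of how a macro-like autocallable could be built on IPyAutocall; registering it in the user namespace is shown only as comments because it needs a live shell.

class Greet(IPyAutocall):
    """Prints a greeting whenever its bare name is typed, even with %autocall off."""

    def __call__(self):
        print("hello from", self._ip)

# ip = get_ipython()
# ip.user_ns["greet"] = Greet(ip)   # typing `greet` at the prompt now calls it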
@ -0,0 +1,86 @@
|
||||
"""
|
||||
A context manager for managing things injected into :mod:`builtins`.
|
||||
"""
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
import builtins as builtin_mod
|
||||
|
||||
from traitlets.config.configurable import Configurable
|
||||
|
||||
from traitlets import Instance
|
||||
|
||||
|
||||
class __BuiltinUndefined(object): pass
|
||||
BuiltinUndefined = __BuiltinUndefined()
|
||||
|
||||
class __HideBuiltin(object): pass
|
||||
HideBuiltin = __HideBuiltin()
|
||||
|
||||
|
||||
class BuiltinTrap(Configurable):
|
||||
|
||||
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
|
||||
allow_none=True)
|
||||
|
||||
def __init__(self, shell=None):
|
||||
super(BuiltinTrap, self).__init__(shell=shell, config=None)
|
||||
self._orig_builtins = {}
|
||||
# We define this to track if a single BuiltinTrap is nested.
|
||||
# Only turn off the trap when the outermost call to __exit__ is made.
|
||||
self._nested_level = 0
|
||||
self.shell = shell
|
||||
# builtins we always add - if set to HideBuiltin, they will just
|
||||
# be removed instead of being replaced by something else
|
||||
self.auto_builtins = {'exit': HideBuiltin,
|
||||
'quit': HideBuiltin,
|
||||
'get_ipython': self.shell.get_ipython,
|
||||
}
|
||||
|
||||
def __enter__(self):
|
||||
if self._nested_level == 0:
|
||||
self.activate()
|
||||
self._nested_level += 1
|
||||
# I return self, so callers can use add_builtin in a with clause.
|
||||
return self
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
if self._nested_level == 1:
|
||||
self.deactivate()
|
||||
self._nested_level -= 1
|
||||
# Returning False will cause exceptions to propagate
|
||||
return False
|
||||
|
||||
def add_builtin(self, key, value):
|
||||
"""Add a builtin and save the original."""
|
||||
bdict = builtin_mod.__dict__
|
||||
orig = bdict.get(key, BuiltinUndefined)
|
||||
if value is HideBuiltin:
|
||||
if orig is not BuiltinUndefined: #same as 'key in bdict'
|
||||
self._orig_builtins[key] = orig
|
||||
del bdict[key]
|
||||
else:
|
||||
self._orig_builtins[key] = orig
|
||||
bdict[key] = value
|
||||
|
||||
def remove_builtin(self, key, orig):
|
||||
"""Remove an added builtin and re-set the original."""
|
||||
if orig is BuiltinUndefined:
|
||||
del builtin_mod.__dict__[key]
|
||||
else:
|
||||
builtin_mod.__dict__[key] = orig
|
||||
|
||||
def activate(self):
|
||||
"""Store ipython references in the __builtin__ namespace."""
|
||||
|
||||
add_builtin = self.add_builtin
|
||||
for name, func in self.auto_builtins.items():
|
||||
add_builtin(name, func)
|
||||
|
||||
def deactivate(self):
|
||||
"""Remove any builtins which might have been added by add_builtins, or
|
||||
restore overwritten ones to their previous values."""
|
||||
remove_builtin = self.remove_builtin
|
||||
for key, val in self._orig_builtins.items():
|
||||
remove_builtin(key, val)
|
||||
self._orig_builtins.clear()
|
||||
self._builtins_added = False
|
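Usage sketch (assumes a running InteractiveShell, so it is left as comments): the trap is a re-entrant context manager, and nested with-blocks only install and remove the builtins at the outermost level.

# from IPython import get_ipython
# shell = get_ipython()
# with shell.builtin_trap:
#     # inside the block, `exit`, `quit` and `get_ipython` resolve to the
#     # shell-provided objects; on exit the originals are restored
#     pass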
214
.venv/lib/python3.12/site-packages/IPython/core/compilerop.py
Normal file
@ -0,0 +1,214 @@
|
||||
"""Compiler tools with improved interactive support.
|
||||
|
||||
Provides compilation machinery similar to codeop, but with caching support so
|
||||
we can provide interactive tracebacks.
|
||||
|
||||
Authors
|
||||
-------
|
||||
* Robert Kern
|
||||
* Fernando Perez
|
||||
* Thomas Kluyver
|
||||
"""
|
||||
|
||||
# Note: though it might be more natural to name this module 'compiler', that
|
||||
# name is in the stdlib and name collisions with the stdlib tend to produce
|
||||
# weird problems (often with third-party tools).
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2010-2011 The IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# Stdlib imports
|
||||
import __future__
|
||||
from ast import PyCF_ONLY_AST
|
||||
import codeop
|
||||
import functools
|
||||
import hashlib
|
||||
import linecache
|
||||
import operator
|
||||
import time
|
||||
from contextlib import contextmanager
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Constants
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# Roughly equal to PyCF_MASK | PyCF_MASK_OBSOLETE as defined in pythonrun.h,
|
||||
# this is used as a bitmask to extract future-related code flags.
|
||||
PyCF_MASK = functools.reduce(operator.or_,
|
||||
(getattr(__future__, fname).compiler_flag
|
||||
for fname in __future__.all_feature_names))
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Local utilities
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
def code_name(code, number=0):
|
||||
""" Compute a (probably) unique name for code for caching.
|
||||
|
||||
This now expects code to be unicode.
|
||||
"""
|
||||
hash_digest = hashlib.sha1(code.encode("utf-8")).hexdigest()
|
||||
# Include the number and 12 characters of the hash in the name. It's
|
||||
# pretty much impossible that in a single session we'll have collisions
|
||||
# even with truncated hashes, and the full one makes tracebacks too long
|
||||
return '<ipython-input-{0}-{1}>'.format(number, hash_digest[:12])
|
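For illustration, the generated pseudo-filename embeds the execution count and a 12-character hash prefix, which is what later makes cached cells addressable in linecache:

name = code_name("print('hi')\n", number=3)
assert name.startswith("<ipython-input-3-") and name.endswith(">")
assert len(name) == len("<ipython-input-3->") + 12   # 12 hex chars of the SHA-1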
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Classes and functions
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class CachingCompiler(codeop.Compile):
|
||||
"""A compiler that caches code compiled from interactive statements.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
codeop.Compile.__init__(self)
|
||||
|
||||
# Caching a dictionary { filename: execution_count } for nicely
|
||||
# rendered tracebacks. The filename corresponds to the filename
|
||||
# argument used for the builtins.compile function.
|
||||
self._filename_map = {}
|
||||
|
||||
def ast_parse(self, source, filename='<unknown>', symbol='exec'):
|
||||
"""Parse code to an AST with the current compiler flags active.
|
||||
|
||||
Arguments are exactly the same as ast.parse (in the standard library),
|
||||
and are passed to the built-in compile function."""
|
||||
return compile(source, filename, symbol, self.flags | PyCF_ONLY_AST, 1)
|
||||
|
||||
def reset_compiler_flags(self):
|
||||
"""Reset compiler flags to default state."""
|
||||
# This value is copied from codeop.Compile.__init__, so if that ever
|
||||
# changes, it will need to be updated.
|
||||
self.flags = codeop.PyCF_DONT_IMPLY_DEDENT
|
||||
|
||||
@property
|
||||
def compiler_flags(self):
|
||||
"""Flags currently active in the compilation process.
|
||||
"""
|
||||
return self.flags
|
||||
|
||||
def get_code_name(self, raw_code, transformed_code, number):
|
||||
"""Compute filename given the code, and the cell number.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
raw_code : str
|
||||
The raw cell code.
|
||||
transformed_code : str
|
||||
The executable Python source code to cache and compile.
|
||||
number : int
|
||||
A number which forms part of the code's name. Used for the execution
|
||||
counter.
|
||||
|
||||
Returns
|
||||
-------
|
||||
The computed filename.
|
||||
"""
|
||||
return code_name(transformed_code, number)
|
||||
|
||||
def format_code_name(self, name):
|
||||
"""Return a user-friendly label and name for a code block.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
name : str
|
||||
The name for the code block returned from get_code_name
|
||||
|
||||
Returns
|
||||
-------
|
||||
A (label, name) pair that can be used in tracebacks, or None if the default formatting should be used.
|
||||
"""
|
||||
if name in self._filename_map:
|
||||
return "Cell", "In[%s]" % self._filename_map[name]
|
||||
|
||||
def cache(self, transformed_code, number=0, raw_code=None):
|
||||
"""Make a name for a block of code, and cache the code.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
transformed_code : str
|
||||
The executable Python source code to cache and compile.
|
||||
number : int
|
||||
A number which forms part of the code's name. Used for the execution
|
||||
counter.
|
||||
raw_code : str
|
||||
The raw code before transformation, if None, set to `transformed_code`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
The name of the cached code (as a string). Pass this as the filename
|
||||
argument to compilation, so that tracebacks are correctly hooked up.
|
||||
"""
|
||||
if raw_code is None:
|
||||
raw_code = transformed_code
|
||||
|
||||
name = self.get_code_name(raw_code, transformed_code, number)
|
||||
|
||||
# Save the execution count
|
||||
self._filename_map[name] = number
|
||||
|
||||
# Since Python 2.5, setting mtime to `None` means the lines will
|
||||
# never be removed by `linecache.checkcache`. This means all the
|
||||
# monkeypatching has *never* been necessary, since this code was
|
||||
# only added in 2010, at which point IPython had already stopped
|
||||
# supporting Python 2.4.
|
||||
#
|
||||
# Note that `linecache.clearcache` and `linecache.updatecache` may
|
||||
# still remove our code from the cache, but those show explicit
|
||||
# intent, and we should not try to interfere. Normally the former
|
||||
# is never called except when out of memory, and the latter is only
|
||||
# called for lines *not* in the cache.
|
||||
entry = (
|
||||
len(transformed_code),
|
||||
None,
|
||||
[line + "\n" for line in transformed_code.splitlines()],
|
||||
name,
|
||||
)
|
||||
linecache.cache[name] = entry
|
||||
return name
|
||||
|
||||
@contextmanager
|
||||
def extra_flags(self, flags):
|
||||
## bits that we'll set to 1
|
||||
turn_on_bits = ~self.flags & flags
|
||||
|
||||
|
||||
self.flags = self.flags | flags
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
# turn off only the bits we turned on so that something like
|
||||
# __future__ that set flags stays.
|
||||
self.flags &= ~turn_on_bits
|
||||
|
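An illustrative check (not part of this module) of what CachingCompiler.cache() buys us: once a cell is cached, linecache can recover its source by the pseudo-filename, which is how tracebacks for interactive input can show source lines. linecache is already imported at the top of this file.

compiler = CachingCompiler()
fname = compiler.cache("x = 1\nraise ValueError('boom')\n", number=7)
assert linecache.getlines(fname) == ["x = 1\n", "raise ValueError('boom')\n"]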
||||
|
||||
def check_linecache_ipython(*args):
|
||||
"""Deprecated since IPython 8.6. Call linecache.checkcache() directly.
|
||||
|
||||
It was already not necessary to call this function directly. If no
|
||||
CachingCompiler had been created, this function would fail badly. If
|
||||
an instance had been created, this function would've been monkeypatched
|
||||
into place.
|
||||
|
||||
As of IPython 8.6, the monkeypatching has gone away entirely. But there
|
||||
were still internal callers of this function, so maybe external callers
|
||||
also existed?
|
||||
"""
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"Deprecated Since IPython 8.6, Just call linecache.checkcache() directly.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
linecache.checkcache()
|
3421
.venv/lib/python3.12/site-packages/IPython/core/completer.py
Normal file
File diff suppressed because it is too large
382
.venv/lib/python3.12/site-packages/IPython/core/completerlib.py
Normal file
@ -0,0 +1,382 @@
|
||||
# encoding: utf-8
|
||||
"""Implementations for various useful completers.
|
||||
|
||||
These are all loaded by default by IPython.
|
||||
"""
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2010-2011 The IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# Stdlib imports
|
||||
import glob
|
||||
import inspect
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from importlib import import_module
|
||||
from importlib.machinery import all_suffixes
|
||||
|
||||
|
||||
# Third-party imports
|
||||
from time import time
|
||||
from zipimport import zipimporter
|
||||
|
||||
# Our own imports
|
||||
from .completer import expand_user, compress_user
|
||||
from .error import TryNext
|
||||
from ..utils._process_common import arg_split
|
||||
|
||||
# FIXME: this should be pulled in with the right call via the component system
|
||||
from IPython import get_ipython
|
||||
|
||||
from typing import List
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Globals and constants
|
||||
#-----------------------------------------------------------------------------
|
||||
_suffixes = all_suffixes()
|
||||
|
||||
# Time in seconds after which the rootmodules will be stored permanently in the
|
||||
# ipython ip.db database (kept in the user's .ipython dir).
|
||||
TIMEOUT_STORAGE = 2
|
||||
|
||||
# Time in seconds after which we give up
|
||||
TIMEOUT_GIVEUP = 20
|
||||
|
||||
# Regular expression for the python import statement
|
||||
import_re = re.compile(r'(?P<name>[^\W\d]\w*?)'
|
||||
r'(?P<package>[/\\]__init__)?'
|
||||
r'(?P<suffix>%s)$' %
|
||||
r'|'.join(re.escape(s) for s in _suffixes))
|
||||
|
||||
# RE for the ipython %run command (python + ipython scripts)
|
||||
magic_run_re = re.compile(r'.*(\.ipy|\.ipynb|\.py[w]?)$')
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Local utilities
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def module_list(path: str) -> List[str]:
|
||||
"""
|
||||
Return the list containing the names of the modules available in the given
|
||||
folder.
|
||||
"""
|
||||
# sys.path has the cwd as an empty string, but isdir/listdir need it as '.'
|
||||
if path == '':
|
||||
path = '.'
|
||||
|
||||
# A few local constants to be used in loops below
|
||||
pjoin = os.path.join
|
||||
|
||||
if os.path.isdir(path):
|
||||
# Build a list of all files in the directory and all files
|
||||
# in its subdirectories. For performance reasons, do not
|
||||
# recurse more than one level into subdirectories.
|
||||
files: List[str] = []
|
||||
for root, dirs, nondirs in os.walk(path, followlinks=True):
|
||||
subdir = root[len(path)+1:]
|
||||
if subdir:
|
||||
files.extend(pjoin(subdir, f) for f in nondirs)
|
||||
dirs[:] = [] # Do not recurse into additional subdirectories.
|
||||
else:
|
||||
files.extend(nondirs)
|
||||
|
||||
else:
|
||||
try:
|
||||
files = list(zipimporter(path)._files.keys()) # type: ignore
|
||||
except Exception:
|
||||
files = []
|
||||
|
||||
# Build a list of modules which match the import_re regex.
|
||||
modules = []
|
||||
for f in files:
|
||||
m = import_re.match(f)
|
||||
if m:
|
||||
modules.append(m.group('name'))
|
||||
return list(set(modules))
|
||||
|
||||
|
||||
def get_root_modules():
|
||||
"""
|
||||
Returns a list containing the names of all the modules available in the
|
||||
folders of the pythonpath.
|
||||
|
||||
ip.db['rootmodules_cache'] maps sys.path entries to list of modules.
|
||||
"""
|
||||
ip = get_ipython()
|
||||
if ip is None:
|
||||
# No global shell instance to store cached list of modules.
|
||||
# Don't try to scan for modules every time.
|
||||
return list(sys.builtin_module_names)
|
||||
|
||||
if getattr(ip.db, "_mock", False):
|
||||
rootmodules_cache = {}
|
||||
else:
|
||||
rootmodules_cache = ip.db.get("rootmodules_cache", {})
|
||||
rootmodules = list(sys.builtin_module_names)
|
||||
start_time = time()
|
||||
store = False
|
||||
for path in sys.path:
|
||||
try:
|
||||
modules = rootmodules_cache[path]
|
||||
except KeyError:
|
||||
modules = module_list(path)
|
||||
try:
|
||||
modules.remove('__init__')
|
||||
except ValueError:
|
||||
pass
|
||||
if path not in ('', '.'): # cwd modules should not be cached
|
||||
rootmodules_cache[path] = modules
|
||||
if time() - start_time > TIMEOUT_STORAGE and not store:
|
||||
store = True
|
||||
print("\nCaching the list of root modules, please wait!")
|
||||
print("(This will only be done once - type '%rehashx' to "
|
||||
"reset cache!)\n")
|
||||
sys.stdout.flush()
|
||||
if time() - start_time > TIMEOUT_GIVEUP:
|
||||
print("This is taking too long, we give up.\n")
|
||||
return []
|
||||
rootmodules.extend(modules)
|
||||
if store:
|
||||
ip.db['rootmodules_cache'] = rootmodules_cache
|
||||
rootmodules = list(set(rootmodules))
|
||||
return rootmodules
|
||||
|
||||
|
||||
def is_importable(module, attr: str, only_modules) -> bool:
|
||||
if only_modules:
|
||||
try:
|
||||
mod = getattr(module, attr)
|
||||
except ModuleNotFoundError:
|
||||
# See gh-14434
|
||||
return False
|
||||
return inspect.ismodule(mod)
|
||||
else:
|
||||
return not(attr[:2] == '__' and attr[-2:] == '__')
|
||||
|
||||
def is_possible_submodule(module, attr):
|
||||
try:
|
||||
obj = getattr(module, attr)
|
||||
except AttributeError:
|
||||
# Is possibly an unimported submodule
|
||||
return True
|
||||
except TypeError:
|
||||
# https://github.com/ipython/ipython/issues/9678
|
||||
return False
|
||||
return inspect.ismodule(obj)
|
||||
|
||||
|
||||
def try_import(mod: str, only_modules=False) -> List[str]:
|
||||
"""
|
||||
Try to import given module and return list of potential completions.
|
||||
"""
|
||||
mod = mod.rstrip('.')
|
||||
try:
|
||||
m = import_module(mod)
|
||||
except:
|
||||
return []
|
||||
|
||||
m_is_init = '__init__' in (getattr(m, '__file__', '') or '')
|
||||
|
||||
completions = []
|
||||
if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init:
|
||||
completions.extend( [attr for attr in dir(m) if
|
||||
is_importable(m, attr, only_modules)])
|
||||
|
||||
m_all = getattr(m, "__all__", [])
|
||||
if only_modules:
|
||||
completions.extend(attr for attr in m_all if is_possible_submodule(m, attr))
|
||||
else:
|
||||
completions.extend(m_all)
|
||||
|
||||
if m_is_init:
|
||||
file_ = m.__file__
|
||||
file_path = os.path.dirname(file_) # type: ignore
|
||||
if file_path is not None:
|
||||
completions.extend(module_list(file_path))
|
||||
completions_set = {c for c in completions if isinstance(c, str)}
|
||||
completions_set.discard('__init__')
|
||||
return list(completions_set)
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Completion-related functions.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
def quick_completer(cmd, completions):
|
||||
r""" Easily create a trivial completer for a command.
|
||||
|
||||
Takes either a list of completions, or all completions as a single string (which will
|
||||
be split on whitespace).
|
||||
|
||||
Example::
|
||||
|
||||
[d:\ipython]|1> import ipy_completers
|
||||
[d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
|
||||
[d:\ipython]|3> foo b<TAB>
|
||||
bar baz
|
||||
[d:\ipython]|3> foo ba
|
||||
"""
|
||||
|
||||
if isinstance(completions, str):
|
||||
completions = completions.split()
|
||||
|
||||
def do_complete(self, event):
|
||||
return completions
|
||||
|
||||
get_ipython().set_hook('complete_command',do_complete, str_key = cmd)
|
||||
|
||||
def module_completion(line):
|
||||
"""
|
||||
Returns a list containing the completion possibilities for an import line.
|
||||
|
||||
The line looks like this:
|
||||
'import xml.d'
|
||||
'from xml.dom import'
|
||||
"""
|
||||
|
||||
words = line.split(' ')
|
||||
nwords = len(words)
|
||||
|
||||
# from whatever <tab> -> 'import '
|
||||
if nwords == 3 and words[0] == 'from':
|
||||
return ['import ']
|
||||
|
||||
# 'from xy<tab>' or 'import xy<tab>'
|
||||
if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) :
|
||||
if nwords == 1:
|
||||
return get_root_modules()
|
||||
mod = words[1].split('.')
|
||||
if len(mod) < 2:
|
||||
return get_root_modules()
|
||||
completion_list = try_import('.'.join(mod[:-1]), True)
|
||||
return ['.'.join(mod[:-1] + [el]) for el in completion_list]
|
||||
|
||||
# 'from xyz import abc<tab>'
|
||||
if nwords >= 3 and words[0] == 'from':
|
||||
mod = words[1]
|
||||
return try_import(mod)
|
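Illustrative behaviour on a standard CPython install (not part of the module; the exact completion list for the first call depends on what is importable in the environment):

assert "xml.dom" in module_completion("import xml.d")   # submodules of the xml package
assert module_completion("from xml ") == ["import "]    # suggest the `import` keyword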
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Completers
|
||||
#-----------------------------------------------------------------------------
|
||||
# These all have the func(self, event) signature to be used as custom
|
||||
# completers
|
||||
|
||||
def module_completer(self,event):
|
||||
"""Give completions after user has typed 'import ...' or 'from ...'"""
|
||||
|
||||
# This works in all versions of python. While 2.5 has
|
||||
# pkgutil.walk_packages(), that particular routine is fairly dangerous,
|
||||
# since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
|
||||
# of possibly problematic side effects.
|
||||
# This searches the folders on sys.path for available modules.
|
||||
|
||||
return module_completion(event.line)
|
||||
|
||||
# FIXME: there's a lot of logic common to the run, cd and builtin file
|
||||
# completers, that is currently reimplemented in each.
|
||||
|
||||
def magic_run_completer(self, event):
|
||||
"""Complete files that end in .py or .ipy or .ipynb for the %run command.
|
||||
"""
|
||||
comps = arg_split(event.line, strict=False)
|
||||
# relpath should be the current token that we need to complete.
|
||||
if (len(comps) > 1) and (not event.line.endswith(' ')):
|
||||
relpath = comps[-1].strip("'\"")
|
||||
else:
|
||||
relpath = ''
|
||||
|
||||
#print("\nev=", event) # dbg
|
||||
#print("rp=", relpath) # dbg
|
||||
#print('comps=', comps) # dbg
|
||||
|
||||
lglob = glob.glob
|
||||
isdir = os.path.isdir
|
||||
relpath, tilde_expand, tilde_val = expand_user(relpath)
|
||||
|
||||
# Find if the user has already typed the first filename, after which we
|
||||
# should complete on all files, since after the first one other files may
|
||||
# be arguments to the input script.
|
||||
|
||||
if any(magic_run_re.match(c) for c in comps):
|
||||
matches = [f.replace('\\','/') + ('/' if isdir(f) else '')
|
||||
for f in lglob(relpath+'*')]
|
||||
else:
|
||||
dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)]
|
||||
pys = [f.replace('\\','/')
|
||||
for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') +
|
||||
lglob(relpath+'*.ipynb') + lglob(relpath + '*.pyw')]
|
||||
|
||||
matches = dirs + pys
|
||||
|
||||
#print('run comp:', dirs+pys) # dbg
|
||||
return [compress_user(p, tilde_expand, tilde_val) for p in matches]
|
||||
|
||||
|
||||
def cd_completer(self, event):
|
||||
"""Completer function for cd, which only returns directories."""
|
||||
ip = get_ipython()
|
||||
relpath = event.symbol
|
||||
|
||||
#print(event) # dbg
|
||||
if event.line.endswith('-b') or ' -b ' in event.line:
|
||||
# return only bookmark completions
|
||||
bkms = self.db.get('bookmarks', None)
|
||||
if bkms:
|
||||
return bkms.keys()
|
||||
else:
|
||||
return []
|
||||
|
||||
if event.symbol == '-':
|
||||
width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
|
||||
# jump in directory history by number
|
||||
fmt = '-%0' + width_dh +'d [%s]'
|
||||
ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])]
|
||||
if len(ents) > 1:
|
||||
return ents
|
||||
return []
|
||||
|
||||
if event.symbol.startswith('--'):
|
||||
return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]
|
||||
|
||||
# Expand ~ in path and normalize directory separators.
|
||||
relpath, tilde_expand, tilde_val = expand_user(relpath)
|
||||
relpath = relpath.replace('\\','/')
|
||||
|
||||
found = []
|
||||
for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*')
|
||||
if os.path.isdir(f)]:
|
||||
if ' ' in d:
|
||||
# we don't want to deal with any of that, complex code
|
||||
# for this is elsewhere
|
||||
raise TryNext
|
||||
|
||||
found.append(d)
|
||||
|
||||
if not found:
|
||||
if os.path.isdir(relpath):
|
||||
return [compress_user(relpath, tilde_expand, tilde_val)]
|
||||
|
||||
# if no completions so far, try bookmarks
|
||||
bks = self.db.get('bookmarks',{})
|
||||
bkmatches = [s for s in bks if s.startswith(event.symbol)]
|
||||
if bkmatches:
|
||||
return bkmatches
|
||||
|
||||
raise TryNext
|
||||
|
||||
return [compress_user(p, tilde_expand, tilde_val) for p in found]
|
||||
|
||||
def reset_completer(self, event):
|
||||
"A completer for %reset magic"
|
||||
return '-f -s in out array dhist'.split()
|
248
.venv/lib/python3.12/site-packages/IPython/core/crashhandler.py
Normal file
@ -0,0 +1,248 @@
|
||||
# encoding: utf-8
|
||||
"""sys.excepthook for IPython itself, leaves a detailed report on disk.
|
||||
|
||||
Authors:
|
||||
|
||||
* Fernando Perez
|
||||
* Brian E. Granger
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
|
||||
# Copyright (C) 2008-2011 The IPython Development Team
|
||||
#
|
||||
# Distributed under the terms of the BSD License. The full license is in
|
||||
# the file COPYING, distributed as part of this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import sys
|
||||
import traceback
|
||||
from pprint import pformat
|
||||
from pathlib import Path
|
||||
|
||||
import builtins as builtin_mod
|
||||
|
||||
from IPython.core import ultratb
|
||||
from IPython.core.application import Application
|
||||
from IPython.core.release import author_email
|
||||
from IPython.utils.sysinfo import sys_info
|
||||
|
||||
from IPython.core.release import __version__ as version
|
||||
|
||||
from typing import Optional, Dict
|
||||
import types
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Code
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# Template for the user message.
|
||||
_default_message_template = """\
|
||||
Oops, {app_name} crashed. We do our best to make it stable, but...
|
||||
|
||||
A crash report was automatically generated with the following information:
|
||||
- A verbatim copy of the crash traceback.
|
||||
- A copy of your input history during this session.
|
||||
- Data on your current {app_name} configuration.
|
||||
|
||||
It was left in the file named:
|
||||
\t'{crash_report_fname}'
|
||||
If you can email this file to the developers, the information in it will help
|
||||
them in understanding and correcting the problem.
|
||||
|
||||
You can mail it to: {contact_name} at {contact_email}
|
||||
with the subject '{app_name} Crash Report'.
|
||||
|
||||
If you want to do it now, the following command will work (under Unix):
|
||||
mail -s '{app_name} Crash Report' {contact_email} < {crash_report_fname}
|
||||
|
||||
In your email, please also include information about:
|
||||
- The operating system under which the crash happened: Linux, macOS, Windows,
|
||||
other, and which exact version (for example: Ubuntu 16.04.3, macOS 10.13.2,
|
||||
Windows 10 Pro), and whether it is 32-bit or 64-bit;
|
||||
- How {app_name} was installed: using pip or conda, from GitHub, as part of
|
||||
a Docker container, or other, providing more detail if possible;
|
||||
- How to reproduce the crash: what exact sequence of instructions can one
|
||||
input to get the same crash? Ideally, find a minimal yet complete sequence
|
||||
of instructions that yields the crash.
|
||||
|
||||
To ensure accurate tracking of this issue, please file a report about it at:
|
||||
{bug_tracker}
|
||||
"""
|
||||
|
||||
_lite_message_template = """
|
||||
If you suspect this is an IPython {version} bug, please report it at:
|
||||
https://github.com/ipython/ipython/issues
|
||||
or send an email to the mailing list at {email}
|
||||
|
||||
You can print a more detailed traceback right now with "%tb", or use "%debug"
|
||||
to interactively debug it.
|
||||
|
||||
Extra-detailed tracebacks for bug-reporting purposes can be enabled via:
|
||||
{config}Application.verbose_crash=True
|
||||
"""
|
||||
|
||||
|
||||
class CrashHandler:
|
||||
"""Customizable crash handlers for IPython applications.
|
||||
|
||||
Instances of this class provide a :meth:`__call__` method which can be
|
||||
used as a ``sys.excepthook``. The :meth:`__call__` signature is::
|
||||
|
||||
def __call__(self, etype, evalue, etb)
|
||||
"""
|
||||
|
||||
message_template = _default_message_template
|
||||
section_sep = '\n\n'+'*'*75+'\n\n'
|
||||
info: Dict[str, Optional[str]]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
app: Application,
|
||||
contact_name: Optional[str] = None,
|
||||
contact_email: Optional[str] = None,
|
||||
bug_tracker: Optional[str] = None,
|
||||
show_crash_traceback: bool = True,
|
||||
call_pdb: bool = False,
|
||||
):
|
||||
"""Create a new crash handler
|
||||
|
||||
Parameters
|
||||
----------
|
||||
app : Application
|
||||
A running :class:`Application` instance, which will be queried at
|
||||
crash time for internal information.
|
||||
contact_name : str
|
||||
A string with the name of the person to contact.
|
||||
contact_email : str
|
||||
A string with the email address of the contact.
|
||||
bug_tracker : str
|
||||
A string with the URL for your project's bug tracker.
|
||||
show_crash_traceback : bool
|
||||
If false, don't print the crash traceback on stderr, only generate
|
||||
the on-disk report
|
||||
call_pdb
|
||||
Whether to call pdb on crash
|
||||
|
||||
Attributes
|
||||
----------
|
||||
These instances contain some non-argument attributes which allow for
|
||||
further customization of the crash handler's behavior. Please see the
|
||||
source for further details.
|
||||
|
||||
"""
|
||||
self.crash_report_fname = "Crash_report_%s.txt" % app.name
|
||||
self.app = app
|
||||
self.call_pdb = call_pdb
|
||||
#self.call_pdb = True # dbg
|
||||
self.show_crash_traceback = show_crash_traceback
|
||||
self.info = dict(app_name = app.name,
|
||||
contact_name = contact_name,
|
||||
contact_email = contact_email,
|
||||
bug_tracker = bug_tracker,
|
||||
crash_report_fname = self.crash_report_fname)
|
||||
|
||||
def __call__(
|
||||
self,
|
||||
etype: type[BaseException],
|
||||
evalue: BaseException,
|
||||
etb: types.TracebackType,
|
||||
) -> None:
|
||||
"""Handle an exception, call for compatible with sys.excepthook"""
|
||||
|
||||
# do not allow the crash handler to be called twice without reinstalling it
|
||||
# this prevents unlikely errors in the crash handling from entering an
|
||||
# infinite loop.
|
||||
sys.excepthook = sys.__excepthook__
|
||||
|
||||
# Report tracebacks shouldn't use color in general (safer for users)
|
||||
color_scheme = 'NoColor'
|
||||
|
||||
# Use this ONLY for developer debugging (keep commented out for release)
|
||||
# color_scheme = 'Linux' # dbg
|
||||
ipython_dir = getattr(self.app, "ipython_dir", None)
|
||||
if ipython_dir is not None:
|
||||
assert isinstance(ipython_dir, str)
|
||||
rptdir = Path(ipython_dir)
|
||||
else:
|
||||
rptdir = Path.cwd()
|
||||
if not rptdir.is_dir():
|
||||
rptdir = Path.cwd()
|
||||
report_name = rptdir / self.crash_report_fname
|
||||
# write the report filename into the instance dict so it can get
|
||||
# properly expanded out in the user message template
|
||||
self.crash_report_fname = str(report_name)
|
||||
self.info["crash_report_fname"] = str(report_name)
|
||||
TBhandler = ultratb.VerboseTB(
|
||||
color_scheme=color_scheme,
|
||||
long_header=True,
|
||||
call_pdb=self.call_pdb,
|
||||
)
|
||||
if self.call_pdb:
|
||||
TBhandler(etype,evalue,etb)
|
||||
return
|
||||
else:
|
||||
traceback = TBhandler.text(etype,evalue,etb,context=31)
|
||||
|
||||
# print traceback to screen
|
||||
if self.show_crash_traceback:
|
||||
print(traceback, file=sys.stderr)
|
||||
|
||||
# and generate a complete report on disk
|
||||
try:
|
||||
report = open(report_name, "w", encoding="utf-8")
|
||||
except:
|
||||
print('Could not create crash report on disk.', file=sys.stderr)
|
||||
return
|
||||
|
||||
with report:
|
||||
# Inform user on stderr of what happened
|
||||
print('\n'+'*'*70+'\n', file=sys.stderr)
|
||||
print(self.message_template.format(**self.info), file=sys.stderr)
|
||||
|
||||
# Construct report on disk
|
||||
report.write(self.make_report(str(traceback)))
|
||||
|
||||
builtin_mod.input("Hit <Enter> to quit (your terminal may close):")
|
||||
|
||||
def make_report(self, traceback: str) -> str:
|
||||
"""Return a string containing a crash report."""
|
||||
|
||||
sec_sep = self.section_sep
|
||||
|
||||
report = ['*'*75+'\n\n'+'IPython post-mortem report\n\n']
|
||||
rpt_add = report.append
|
||||
rpt_add(sys_info())
|
||||
|
||||
try:
|
||||
config = pformat(self.app.config)
|
||||
rpt_add(sec_sep)
|
||||
rpt_add("Application name: %s\n\n" % self.app.name)
|
||||
rpt_add("Current user configuration structure:\n\n")
|
||||
rpt_add(config)
|
||||
except:
|
||||
pass
|
||||
rpt_add(sec_sep+'Crash traceback:\n\n' + traceback)
|
||||
|
||||
return ''.join(report)
|
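Installation sketch (illustrative and left as comments; IPython's Application wires this up itself via init_crash_handler). The contact details below are placeholders.

# app = get_ipython().parent        # a running IPython Application instance
# sys.excepthook = CrashHandler(
#     app,
#     contact_name="IPython dev team",
#     contact_email="ipython-dev@example.invalid",   # placeholder address
#     bug_tracker="https://github.com/ipython/ipython/issues",
# )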
||||
|
||||
|
||||
def crash_handler_lite(
|
||||
etype: type[BaseException], evalue: BaseException, tb: types.TracebackType
|
||||
) -> None:
|
||||
"""a light excepthook, adding a small message to the usual traceback"""
|
||||
traceback.print_exception(etype, evalue, tb)
|
||||
|
||||
from IPython.core.interactiveshell import InteractiveShell
|
||||
if InteractiveShell.initialized():
|
||||
# we are in a Shell environment, give %magic example
|
||||
config = "%config "
|
||||
else:
|
||||
# we are not in a shell, show generic config
|
||||
config = "c."
|
||||
print(_lite_message_template.format(email=author_email, config=config, version=version), file=sys.stderr)
|
||||
|
1136
.venv/lib/python3.12/site-packages/IPython/core/debugger.py
Normal file
File diff suppressed because it is too large
1373
.venv/lib/python3.12/site-packages/IPython/core/display.py
Normal file
File diff suppressed because it is too large
@ -0,0 +1,391 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Top-level display functions for displaying object in different formats."""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
|
||||
from binascii import b2a_hex
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
__all__ = ['display', 'clear_output', 'publish_display_data', 'update_display', 'DisplayHandle']
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# utility functions
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _merge(d1, d2):
|
||||
"""Like update, but merges sub-dicts instead of clobbering at the top level.
|
||||
|
||||
Updates d1 in-place
|
||||
"""
|
||||
|
||||
if not isinstance(d2, dict) or not isinstance(d1, dict):
|
||||
return d2
|
||||
for key, value in d2.items():
|
||||
d1[key] = _merge(d1.get(key), value)
|
||||
return d1
|
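A small illustrative check (not part of the module) of the merge semantics: sub-dicts are merged key by key rather than replaced, which is how user-supplied metadata is layered over formatter metadata further down.

base = {"image/png": {"width": 100}}
_merge(base, {"image/png": {"height": 50}, "text/plain": {}})
assert base == {"image/png": {"width": 100, "height": 50}, "text/plain": {}}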
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Main functions
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class _Sentinel:
|
||||
def __repr__(self):
|
||||
return "<deprecated>"
|
||||
|
||||
|
||||
_sentinel = _Sentinel()
|
||||
|
||||
# use * to indicate transient is keyword-only
|
||||
def publish_display_data(
|
||||
data, metadata=None, source=_sentinel, *, transient=None, **kwargs
|
||||
):
|
||||
"""Publish data and metadata to all frontends.
|
||||
|
||||
See the ``display_data`` message in the messaging documentation for
|
||||
more details about this message type.
|
||||
|
||||
Keys of data and metadata can be any mime-type.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
data : dict
|
||||
A dictionary having keys that are valid MIME types (like
|
||||
'text/plain' or 'image/svg+xml') and values that are the data for
|
||||
that MIME type. The data itself must be a JSON'able data
|
||||
structure. Minimally all data should have the 'text/plain' data,
|
||||
which can be displayed by all frontends. If more than the plain
|
||||
text is given, it is up to the frontend to decide which
|
||||
representation to use.
|
||||
metadata : dict
|
||||
A dictionary for metadata related to the data. This can contain
|
||||
arbitrary key, value pairs that frontends can use to interpret
|
||||
the data. mime-type keys matching those in data can be used
|
||||
to specify metadata about particular representations.
|
||||
source : str, deprecated
|
||||
Unused.
|
||||
transient : dict, keyword-only
|
||||
A dictionary of transient data, such as display_id.
|
||||
"""
|
||||
from IPython.core.interactiveshell import InteractiveShell
|
||||
|
||||
if source is not _sentinel:
|
||||
warnings.warn(
|
||||
"The `source` parameter emit a deprecation warning since"
|
||||
" IPython 8.0, it had no effects for a long time and will "
|
||||
" be removed in future versions.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
display_pub = InteractiveShell.instance().display_pub
|
||||
|
||||
# only pass transient if supplied,
|
||||
# to avoid errors with older ipykernel.
|
||||
# TODO: We could check for ipykernel version and provide a detailed upgrade message.
|
||||
if transient:
|
||||
kwargs['transient'] = transient
|
||||
|
||||
display_pub.publish(
|
||||
data=data,
|
||||
metadata=metadata,
|
||||
**kwargs
|
||||
)
|
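An illustrative call (left as comments because it needs an initialized shell with a display publisher): publishing a hand-built mime bundle, where text/plain is the universal fallback representation.

# publish_display_data(
#     data={
#         "text/plain": "A circle",
#         "image/svg+xml": "<svg xmlns='http://www.w3.org/2000/svg'>"
#                          "<circle cx='5' cy='5' r='4'/></svg>",
#     },
#     metadata={"image/svg+xml": {"isolated": True}},
# )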
||||
|
||||
|
||||
def _new_id():
|
||||
"""Generate a new random text id with urandom"""
|
||||
return b2a_hex(os.urandom(16)).decode('ascii')
|
||||
|
||||
|
||||
def display(
|
||||
*objs,
|
||||
include=None,
|
||||
exclude=None,
|
||||
metadata=None,
|
||||
transient=None,
|
||||
display_id=None,
|
||||
raw=False,
|
||||
clear=False,
|
||||
**kwargs,
|
||||
):
|
||||
"""Display a Python object in all frontends.
|
||||
|
||||
By default all representations will be computed and sent to the frontends.
|
||||
Frontends can decide which representation is used and how.
|
||||
|
||||
In terminal IPython this will be similar to using :func:`print`, for use in richer
|
||||
frontends see Jupyter notebook examples with rich display logic.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
*objs : object
|
||||
The Python objects to display.
|
||||
raw : bool, optional
|
||||
Are the objects to be displayed already mimetype-keyed dicts of raw display data,
|
||||
or Python objects that need to be formatted before display? [default: False]
|
||||
include : list, tuple or set, optional
|
||||
A list of format type strings (MIME types) to include in the
|
||||
format data dict. If this is set *only* the format types included
|
||||
in this list will be computed.
|
||||
exclude : list, tuple or set, optional
|
||||
A list of format type strings (MIME types) to exclude in the format
|
||||
data dict. If this is set all format types will be computed,
|
||||
except for those included in this argument.
|
||||
metadata : dict, optional
|
||||
A dictionary of metadata to associate with the output.
|
||||
mime-type keys in this dictionary will be associated with the individual
|
||||
representation formats, if they exist.
|
||||
transient : dict, optional
|
||||
A dictionary of transient data to associate with the output.
|
||||
Data in this dict should not be persisted to files (e.g. notebooks).
|
||||
display_id : str or bool, optional
|
||||
Set an id for the display.
|
||||
This id can be used for updating this display area later via update_display.
|
||||
If given as `True`, generate a new `display_id`
|
||||
clear : bool, optional
|
||||
Should the output area be cleared before displaying anything? If True,
|
||||
this will wait for additional output before clearing. [default: False]
|
||||
**kwargs : additional keyword-args, optional
|
||||
Additional keyword-arguments are passed through to the display publisher.
|
||||
|
||||
Returns
|
||||
-------
|
||||
handle: DisplayHandle
|
||||
Returns a handle on updatable displays for use with :func:`update_display`,
|
||||
if `display_id` is given. Returns :any:`None` if no `display_id` is given
|
||||
(default).
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> class Json(object):
|
||||
... def __init__(self, json):
|
||||
... self.json = json
|
||||
... def _repr_pretty_(self, pp, cycle):
|
||||
... import json
|
||||
... pp.text(json.dumps(self.json, indent=2))
|
||||
... def __repr__(self):
|
||||
... return str(self.json)
|
||||
...
|
||||
|
||||
>>> d = Json({1:2, 3: {4:5}})
|
||||
|
||||
>>> print(d)
|
||||
{1: 2, 3: {4: 5}}
|
||||
|
||||
>>> display(d)
|
||||
{
|
||||
"1": 2,
|
||||
"3": {
|
||||
"4": 5
|
||||
}
|
||||
}
|
||||
|
||||
>>> def int_formatter(integer, pp, cycle):
|
||||
... pp.text('I'*integer)
|
||||
|
||||
>>> plain = get_ipython().display_formatter.formatters['text/plain']
|
||||
>>> plain.for_type(int, int_formatter)
|
||||
<function _repr_pprint at 0x...>
|
||||
>>> display(7-5)
|
||||
II
|
||||
|
||||
>>> del plain.type_printers[int]
|
||||
>>> display(7-5)
|
||||
2
|
||||
|
||||
See Also
|
||||
--------
|
||||
:func:`update_display`
|
||||
|
||||
Notes
|
||||
-----
|
||||
In Python, objects can declare their textual representation using the
|
||||
`__repr__` method. IPython expands on this idea and allows objects to declare
|
||||
other, rich representations including:
|
||||
|
||||
- HTML
|
||||
- JSON
|
||||
- PNG
|
||||
- JPEG
|
||||
- SVG
|
||||
- LaTeX
|
||||
|
||||
A single object can declare some or all of these representations; all are
|
||||
handled by IPython's display system.
|
||||
|
||||
The main idea of the first approach is that you have to implement special
|
||||
display methods when you define your class, one for each representation you
|
||||
want to use. Here is a list of the names of the special methods and the
|
||||
values they must return:
|
||||
|
||||
- `_repr_html_`: return raw HTML as a string, or a tuple (see below).
|
||||
- `_repr_json_`: return a JSONable dict, or a tuple (see below).
|
||||
- `_repr_jpeg_`: return raw JPEG data, or a tuple (see below).
|
||||
- `_repr_png_`: return raw PNG data, or a tuple (see below).
|
||||
- `_repr_svg_`: return raw SVG data as a string, or a tuple (see below).
|
||||
- `_repr_latex_`: return LaTeX commands in a string surrounded by "$",
|
||||
or a tuple (see below).
|
||||
- `_repr_mimebundle_`: return a full mimebundle containing the mapping
|
||||
from all mimetypes to data.
|
||||
Use this for any mime-type not listed above.
|
||||
|
||||
The above functions may also return the object's metadata alongside the
|
||||
data. If the metadata is available, the functions will return a tuple
|
||||
containing the data and metadata, in that order. If there is no metadata
|
||||
available, then the functions will return the data only.
|
||||
|
||||
When you are directly writing your own classes, you can adapt them for
|
||||
display in IPython by following the above approach. But in practice, you
|
||||
often need to work with existing classes that you can't easily modify.
|
||||
|
||||
You can refer to the documentation on integrating with the display system in
|
||||
order to register custom formatters for already existing types
|
||||
(:ref:`integrating_rich_display`).
|
||||
|
||||
.. versionadded:: 5.4 display available without import
|
||||
.. versionadded:: 6.1 display available without import
|
||||
|
||||
Since IPython 5.4 and 6.1 :func:`display` is automatically made available to
|
||||
the user without import. If you are using display in a document that might
|
||||
be used in a pure python context or with older version of IPython, use the
|
||||
following import at the top of your file::
|
||||
|
||||
from IPython.display import display
|
||||
|
||||
"""
|
||||
from IPython.core.interactiveshell import InteractiveShell
|
||||
|
||||
if not InteractiveShell.initialized():
|
||||
# Directly print objects.
|
||||
print(*objs)
|
||||
return
|
||||
|
||||
if transient is None:
|
||||
transient = {}
|
||||
if metadata is None:
|
||||
metadata={}
|
||||
if display_id:
|
||||
if display_id is True:
|
||||
display_id = _new_id()
|
||||
transient['display_id'] = display_id
|
||||
if kwargs.get('update') and 'display_id' not in transient:
|
||||
raise TypeError('display_id required for update_display')
|
||||
if transient:
|
||||
kwargs['transient'] = transient
|
||||
|
||||
if not objs and display_id:
|
||||
# if given no objects, but still a request for a display_id,
|
||||
# we assume the user wants to insert an empty output that
|
||||
# can be updated later
|
||||
objs = [{}]
|
||||
raw = True
|
||||
|
||||
if not raw:
|
||||
format = InteractiveShell.instance().display_formatter.format
|
||||
|
||||
if clear:
|
||||
clear_output(wait=True)
|
||||
|
||||
for obj in objs:
|
||||
if raw:
|
||||
publish_display_data(data=obj, metadata=metadata, **kwargs)
|
||||
else:
|
||||
format_dict, md_dict = format(obj, include=include, exclude=exclude)
|
||||
if not format_dict:
|
||||
# nothing to display (e.g. _ipython_display_ took over)
|
||||
continue
|
||||
if metadata:
|
||||
# kwarg-specified metadata gets precedence
|
||||
_merge(md_dict, metadata)
|
||||
publish_display_data(data=format_dict, metadata=md_dict, **kwargs)
|
||||
if display_id:
|
||||
return DisplayHandle(display_id)
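# Illustrative sketch of the rich-repr protocol documented in the docstring
# above; the class below is hypothetical and not part of this module. Any
# object defining one of the `_repr_*_` methods is rendered by `display`.
class _GreenText:
    """Toy object with an HTML representation."""

    def __init__(self, text):
        self.text = text

    def _repr_html_(self):
        # Return raw HTML as a string; a (data, metadata) tuple is also valid.
        return '<span style="color: green">%s</span>' % self.text

# In an IPython session, rich frontends render the HTML produced above, while
# plain terminals fall back to the default text/plain repr:
#
#     display(_GreenText("hello"))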
|
||||
|
||||
|
||||
# use * for keyword-only display_id arg
|
||||
def update_display(obj, *, display_id, **kwargs):
|
||||
"""Update an existing display by id
|
||||
|
||||
Parameters
|
||||
----------
|
||||
obj
|
||||
The object with which to update the display
|
||||
display_id : keyword-only
|
||||
The id of the display to update
|
||||
|
||||
See Also
|
||||
--------
|
||||
:func:`display`
|
||||
"""
|
||||
kwargs['update'] = True
|
||||
display(obj, display_id=display_id, **kwargs)
|
||||
|
||||
|
||||
class DisplayHandle(object):
|
||||
"""A handle on an updatable display
|
||||
|
||||
Call `.update(obj)` to display a new object.
|
||||
|
||||
Call `.display(obj)` to add a new instance of this display,
|
||||
and update existing instances.
|
||||
|
||||
See Also
|
||||
--------
|
||||
|
||||
:func:`display`, :func:`update_display`
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, display_id=None):
|
||||
if display_id is None:
|
||||
display_id = _new_id()
|
||||
self.display_id = display_id
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id)
|
||||
|
||||
def display(self, obj, **kwargs):
|
||||
"""Make a new display with my id, updating existing instances.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
obj
|
||||
object to display
|
||||
**kwargs
|
||||
additional keyword arguments passed to display
|
||||
"""
|
||||
display(obj, display_id=self.display_id, **kwargs)
|
||||
|
||||
def update(self, obj, **kwargs):
|
||||
"""Update existing displays with my id
|
||||
|
||||
Parameters
|
||||
----------
|
||||
obj
|
||||
object to display
|
||||
**kwargs
|
||||
additional keyword arguments passed to update_display
|
||||
"""
|
||||
update_display(obj, display_id=self.display_id, **kwargs)
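# Hypothetical sketch showing the display_id / handle workflow implemented by
# DisplayHandle above (meant to be run inside an IPython or Jupyter session):
def _demo_display_handle():
    handle = display("working...", display_id=True)  # returns a DisplayHandle
    # ... long-running work happens here ...
    handle.update("done")  # replaces the earlier output in place
    # equivalent, without keeping the handle object around:
    update_display("done", display_id=handle.display_id)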
|
||||
|
||||
|
||||
def clear_output(wait=False):
|
||||
"""Clear the output of the current cell receiving output.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
wait : bool [default: false]
|
||||
Wait to clear the output until new output is available to replace it."""
|
||||
from IPython.core.interactiveshell import InteractiveShell
|
||||
if InteractiveShell.initialized():
|
||||
InteractiveShell.instance().display_pub.clear_output(wait)
|
||||
else:
|
||||
print('\033[2K\r', end='')
|
||||
sys.stdout.flush()
|
||||
print('\033[2K\r', end='')
|
||||
sys.stderr.flush()
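# Hypothetical sketch: a simple text progress loop using clear_output above,
# a common pattern in notebooks (helper name and timing are illustrative).
def _demo_progress(steps=5):
    import time
    for i in range(steps):
        clear_output(wait=True)  # wait=True avoids flicker between updates
        print("step %d of %d" % (i + 1, steps))
        time.sleep(0.1)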
|
@ -0,0 +1,70 @@
|
||||
# encoding: utf-8
|
||||
"""
|
||||
A context manager for handling sys.displayhook.
|
||||
|
||||
Authors:
|
||||
|
||||
* Robert Kern
|
||||
* Brian Granger
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2008-2011 The IPython Development Team
|
||||
#
|
||||
# Distributed under the terms of the BSD License. The full license is in
|
||||
# the file COPYING, distributed as part of this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import sys
|
||||
|
||||
from traitlets.config.configurable import Configurable
|
||||
from traitlets import Any
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Classes and functions
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
|
||||
class DisplayTrap(Configurable):
|
||||
"""Object to manage sys.displayhook.
|
||||
|
||||
This came from IPython.core.kernel.display_hook, but is simplified
|
||||
(no callbacks or formatters) until more of the core is refactored.
|
||||
"""
|
||||
|
||||
hook = Any()
|
||||
|
||||
def __init__(self, hook=None):
|
||||
super(DisplayTrap, self).__init__(hook=hook, config=None)
|
||||
self.old_hook = None
|
||||
# We define this to track if a single BuiltinTrap is nested.
|
||||
# Only turn off the trap when the outermost call to __exit__ is made.
|
||||
self._nested_level = 0
|
||||
|
||||
def __enter__(self):
|
||||
if self._nested_level == 0:
|
||||
self.set()
|
||||
self._nested_level += 1
|
||||
return self
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
if self._nested_level == 1:
|
||||
self.unset()
|
||||
self._nested_level -= 1
|
||||
# Returning False will cause exceptions to propagate
|
||||
return False
|
||||
|
||||
def set(self):
|
||||
"""Set the hook."""
|
||||
if sys.displayhook is not self.hook:
|
||||
self.old_hook = sys.displayhook
|
||||
sys.displayhook = self.hook
|
||||
|
||||
def unset(self):
|
||||
"""Unset the hook."""
|
||||
sys.displayhook = self.old_hook
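# Hypothetical sketch of how DisplayTrap is used: temporarily install a custom
# sys.displayhook for the duration of a `with` block, restoring the previous
# hook afterwards.
def _demo_display_trap():
    captured = []
    with DisplayTrap(hook=captured.append):
        sys.displayhook("intercepted")  # routed to captured.append
    return captured  # -> ["intercepted"]; the original hook is back in place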
|
||||
|
336
.venv/lib/python3.12/site-packages/IPython/core/displayhook.py
Normal file
@ -0,0 +1,336 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Displayhook for IPython.
|
||||
|
||||
This defines a callable class that IPython uses for `sys.displayhook`.
|
||||
"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import builtins as builtin_mod
|
||||
import sys
|
||||
import io as _io
|
||||
import tokenize
|
||||
|
||||
from traitlets.config.configurable import Configurable
|
||||
from traitlets import Instance, Float
|
||||
from warnings import warn
|
||||
|
||||
# TODO: Move the various attributes (cache_size, [others now moved]). Some
|
||||
# of these are also attributes of InteractiveShell. They should be on ONE object
|
||||
# only and the other objects should ask that one object for their values.
|
||||
|
||||
class DisplayHook(Configurable):
|
||||
"""The custom IPython displayhook to replace sys.displayhook.
|
||||
|
||||
This class does many things, but the basic idea is that it is a callable
|
||||
that gets called anytime user code returns a value.
|
||||
"""
|
||||
|
||||
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
|
||||
allow_none=True)
|
||||
exec_result = Instance('IPython.core.interactiveshell.ExecutionResult',
|
||||
allow_none=True)
|
||||
cull_fraction = Float(0.2)
|
||||
|
||||
def __init__(self, shell=None, cache_size=1000, **kwargs):
|
||||
super(DisplayHook, self).__init__(shell=shell, **kwargs)
|
||||
cache_size_min = 3
|
||||
if cache_size <= 0:
|
||||
self.do_full_cache = 0
|
||||
cache_size = 0
|
||||
elif cache_size < cache_size_min:
|
||||
self.do_full_cache = 0
|
||||
cache_size = 0
|
||||
warn('caching was disabled (min value for cache size is %s).' %
|
||||
cache_size_min,stacklevel=3)
|
||||
else:
|
||||
self.do_full_cache = 1
|
||||
|
||||
self.cache_size = cache_size
|
||||
|
||||
# we need a reference to the user-level namespace
|
||||
self.shell = shell
|
||||
|
||||
self._,self.__,self.___ = '','',''
|
||||
|
||||
# these are deliberately global:
|
||||
to_user_ns = {'_':self._,'__':self.__,'___':self.___}
|
||||
self.shell.user_ns.update(to_user_ns)
|
||||
|
||||
@property
|
||||
def prompt_count(self):
|
||||
return self.shell.execution_count
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
# Methods used in __call__. Override these methods to modify the behavior
|
||||
# of the displayhook.
|
||||
#-------------------------------------------------------------------------
|
||||
|
||||
def check_for_underscore(self):
|
||||
"""Check if the user has set the '_' variable by hand."""
|
||||
# If something injected a '_' variable in __builtin__, delete
|
||||
# ipython's automatic one so we don't clobber that. gettext() in
|
||||
# particular uses _, so we need to stay away from it.
|
||||
if '_' in builtin_mod.__dict__:
|
||||
try:
|
||||
user_value = self.shell.user_ns['_']
|
||||
if user_value is not self._:
|
||||
return
|
||||
del self.shell.user_ns['_']
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def quiet(self):
|
||||
"""Should we silence the display hook because of ';'?"""
|
||||
# do not print output if input ends in ';'
|
||||
|
||||
try:
|
||||
cell = self.shell.history_manager.input_hist_parsed[-1]
|
||||
except IndexError:
|
||||
# some uses of ipshellembed may fail here
|
||||
return False
|
||||
|
||||
return self.semicolon_at_end_of_expression(cell)
|
||||
|
||||
@staticmethod
|
||||
def semicolon_at_end_of_expression(expression):
|
||||
"""Parse Python expression and detects whether last token is ';'"""
|
||||
|
||||
sio = _io.StringIO(expression)
|
||||
tokens = list(tokenize.generate_tokens(sio.readline))
|
||||
|
||||
for token in reversed(tokens):
|
||||
if token[0] in (tokenize.ENDMARKER, tokenize.NL, tokenize.NEWLINE, tokenize.COMMENT):
|
||||
continue
|
||||
if (token[0] == tokenize.OP) and (token[1] == ';'):
|
||||
return True
|
||||
else:
|
||||
return False
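# Behaviour sketch (the method is a staticmethod, so it can be called directly
# on the class; the results shown follow from the token loop above):
#
#     DisplayHook.semicolon_at_end_of_expression("1 + 1")        # False
#     DisplayHook.semicolon_at_end_of_expression("1 + 1;")       # True
#     DisplayHook.semicolon_at_end_of_expression("f(x);  # hi")  # True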
|
||||
|
||||
def start_displayhook(self):
|
||||
"""Start the displayhook, initializing resources."""
|
||||
pass
|
||||
|
||||
def write_output_prompt(self):
|
||||
"""Write the output prompt.
|
||||
|
||||
The default implementation simply writes the prompt to
|
||||
``sys.stdout``.
|
||||
"""
|
||||
# Use write, not print which adds an extra space.
|
||||
sys.stdout.write(self.shell.separate_out)
|
||||
outprompt = 'Out[{}]: '.format(self.shell.execution_count)
|
||||
if self.do_full_cache:
|
||||
sys.stdout.write(outprompt)
|
||||
|
||||
def compute_format_data(self, result):
|
||||
"""Compute format data of the object to be displayed.
|
||||
|
||||
The format data is a generalization of the :func:`repr` of an object.
|
||||
In the default implementation the format data is a :class:`dict` of
|
||||
key value pair where the keys are valid MIME types and the values
|
||||
are JSON'able data structure containing the raw data for that MIME
|
||||
type. It is up to frontends to pick a MIME type to use and
|
||||
display that data in an appropriate manner.
|
||||
|
||||
This method only computes the format data for the object and should
|
||||
NOT actually print or write that to a stream.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
result : object
|
||||
The Python object passed to the display hook, whose format will be
|
||||
computed.
|
||||
|
||||
Returns
|
||||
-------
|
||||
(format_dict, md_dict) : dict
|
||||
format_dict is a :class:`dict` whose keys are valid MIME types and values are
|
||||
JSON'able raw data for that MIME type. It is recommended that
|
||||
the returned format_dict always include the "text/plain"
|
||||
MIME type representation of the object.
|
||||
md_dict is a :class:`dict` with the same MIME type keys
|
||||
of metadata associated with each output.
|
||||
|
||||
"""
|
||||
return self.shell.display_formatter.format(result)
|
||||
|
||||
# This can be set to True by the write_output_prompt method in a subclass
|
||||
prompt_end_newline = False
|
||||
|
||||
def write_format_data(self, format_dict, md_dict=None) -> None:
|
||||
"""Write the format data dict to the frontend.
|
||||
|
||||
This default version of this method simply writes the plain text
|
||||
representation of the object to ``sys.stdout``. Subclasses should
|
||||
override this method to send the entire `format_dict` to the
|
||||
frontends.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
format_dict : dict
|
||||
The format dict for the object passed to `sys.displayhook`.
|
||||
md_dict : dict (optional)
|
||||
The metadata dict to be associated with the display data.
|
||||
"""
|
||||
if 'text/plain' not in format_dict:
|
||||
# nothing to do
|
||||
return
|
||||
# We want to print because we want to always make sure we have a
|
||||
# newline, even if all the prompt separators are ''. This is the
|
||||
# standard IPython behavior.
|
||||
result_repr = format_dict['text/plain']
|
||||
if '\n' in result_repr:
|
||||
# So that multi-line strings line up with the left column of
|
||||
# the screen, instead of having the output prompt mess up
|
||||
# their first line.
|
||||
# We use the prompt template instead of the expanded prompt
|
||||
# because the expansion may add ANSI escapes that will interfere
|
||||
# with our ability to determine whether or not we should add
|
||||
# a newline.
|
||||
if not self.prompt_end_newline:
|
||||
# But avoid extraneous empty lines.
|
||||
result_repr = '\n' + result_repr
|
||||
|
||||
try:
|
||||
print(result_repr)
|
||||
except UnicodeEncodeError:
|
||||
# If a character is not supported by the terminal encoding replace
|
||||
# it with its \u or \x representation
|
||||
print(result_repr.encode(sys.stdout.encoding,'backslashreplace').decode(sys.stdout.encoding))
|
||||
|
||||
def update_user_ns(self, result):
|
||||
"""Update user_ns with various things like _, __, _1, etc."""
|
||||
|
||||
# Avoid recursive reference when displaying _oh/Out
|
||||
if self.cache_size and result is not self.shell.user_ns['_oh']:
|
||||
if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache:
|
||||
self.cull_cache()
|
||||
|
||||
# Don't overwrite '_' and friends if '_' is in __builtin__
|
||||
# (otherwise we cause buggy behavior for things like gettext), and
|
||||
# do not overwrite _, __ or ___ if one of these has been assigned
|
||||
# by the user.
|
||||
update_unders = True
|
||||
for unders in ['_'*i for i in range(1,4)]:
|
||||
if not unders in self.shell.user_ns:
|
||||
continue
|
||||
if getattr(self, unders) is not self.shell.user_ns.get(unders):
|
||||
update_unders = False
|
||||
|
||||
self.___ = self.__
|
||||
self.__ = self._
|
||||
self._ = result
|
||||
|
||||
if ('_' not in builtin_mod.__dict__) and (update_unders):
|
||||
self.shell.push({'_':self._,
|
||||
'__':self.__,
|
||||
'___':self.___}, interactive=False)
|
||||
|
||||
# hackish access to top-level namespace to create _1,_2... dynamically
|
||||
to_main = {}
|
||||
if self.do_full_cache:
|
||||
new_result = '_%s' % self.prompt_count
|
||||
to_main[new_result] = result
|
||||
self.shell.push(to_main, interactive=False)
|
||||
self.shell.user_ns['_oh'][self.prompt_count] = result
|
||||
|
||||
def fill_exec_result(self, result):
|
||||
if self.exec_result is not None:
|
||||
self.exec_result.result = result
|
||||
|
||||
def log_output(self, format_dict):
|
||||
"""Log the output."""
|
||||
if 'text/plain' not in format_dict:
|
||||
# nothing to do
|
||||
return
|
||||
if self.shell.logger.log_output:
|
||||
self.shell.logger.log_write(format_dict['text/plain'], 'output')
|
||||
self.shell.history_manager.output_hist_reprs[self.prompt_count] = \
|
||||
format_dict['text/plain']
|
||||
|
||||
def finish_displayhook(self):
|
||||
"""Finish up all displayhook activities."""
|
||||
sys.stdout.write(self.shell.separate_out2)
|
||||
sys.stdout.flush()
|
||||
|
||||
def __call__(self, result=None):
|
||||
"""Printing with history cache management.
|
||||
|
||||
This is invoked every time the interpreter needs to print, and is
|
||||
activated by setting the variable sys.displayhook to it.
|
||||
"""
|
||||
self.check_for_underscore()
|
||||
if result is not None and not self.quiet():
|
||||
self.start_displayhook()
|
||||
self.write_output_prompt()
|
||||
format_dict, md_dict = self.compute_format_data(result)
|
||||
self.update_user_ns(result)
|
||||
self.fill_exec_result(result)
|
||||
if format_dict:
|
||||
self.write_format_data(format_dict, md_dict)
|
||||
self.log_output(format_dict)
|
||||
self.finish_displayhook()
|
||||
|
||||
def cull_cache(self):
|
||||
"""Output cache is full, cull the oldest entries"""
|
||||
oh = self.shell.user_ns.get('_oh', {})
|
||||
sz = len(oh)
|
||||
cull_count = max(int(sz * self.cull_fraction), 2)
|
||||
warn('Output cache limit (currently {sz} entries) hit.\n'
|
||||
'Flushing oldest {cull_count} entries.'.format(sz=sz, cull_count=cull_count))
|
||||
|
||||
for i, n in enumerate(sorted(oh)):
|
||||
if i >= cull_count:
|
||||
break
|
||||
self.shell.user_ns.pop('_%i' % n, None)
|
||||
oh.pop(n, None)
|
||||
|
||||
|
||||
def flush(self):
|
||||
if not self.do_full_cache:
|
||||
raise ValueError("You shouldn't have reached the cache flush "
|
||||
"if full caching is not enabled!")
|
||||
# delete auto-generated vars from global namespace
|
||||
|
||||
for n in range(1,self.prompt_count + 1):
|
||||
key = '_'+repr(n)
|
||||
try:
|
||||
del self.shell.user_ns_hidden[key]
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
del self.shell.user_ns[key]
|
||||
except KeyError:
|
||||
pass
|
||||
# In some embedded circumstances, the user_ns doesn't have the
|
||||
# '_oh' key set up.
|
||||
oh = self.shell.user_ns.get('_oh', None)
|
||||
if oh is not None:
|
||||
oh.clear()
|
||||
|
||||
# Release our own references to objects:
|
||||
self._, self.__, self.___ = '', '', ''
|
||||
|
||||
if '_' not in builtin_mod.__dict__:
|
||||
self.shell.user_ns.update({'_':self._,'__':self.__,'___':self.___})
|
||||
import gc
|
||||
# TODO: Is this really needed?
|
||||
# IronPython blocks here forever
|
||||
if sys.platform != "cli":
|
||||
gc.collect()
|
||||
|
||||
|
||||
class CapturingDisplayHook(object):
|
||||
def __init__(self, shell, outputs=None):
|
||||
self.shell = shell
|
||||
if outputs is None:
|
||||
outputs = []
|
||||
self.outputs = outputs
|
||||
|
||||
def __call__(self, result=None):
|
||||
if result is None:
|
||||
return
|
||||
format_dict, md_dict = self.shell.display_formatter.format(result)
|
||||
self.outputs.append({ 'data': format_dict, 'metadata': md_dict })
|
149
.venv/lib/python3.12/site-packages/IPython/core/displaypub.py
Normal file
@ -0,0 +1,149 @@
|
||||
"""An interface for publishing rich data to frontends.
|
||||
|
||||
There are two components of the display system:
|
||||
|
||||
* Display formatters, which take a Python object and compute the
|
||||
representation of the object in various formats (text, HTML, SVG, etc.).
|
||||
* The display publisher that is used to send the representation data to the
|
||||
various frontends.
|
||||
|
||||
This module defines the logic for display publishing. The display publisher uses
|
||||
the ``display_data`` message type that is defined in the IPython messaging
|
||||
spec.
|
||||
"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
|
||||
import sys
|
||||
|
||||
from traitlets.config.configurable import Configurable
|
||||
from traitlets import List
|
||||
|
||||
# This used to be defined here - it is imported for backwards compatibility
|
||||
from .display_functions import publish_display_data
|
||||
|
||||
import typing as t
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Main payload class
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
|
||||
class DisplayPublisher(Configurable):
|
||||
"""A traited class that publishes display data to frontends.
|
||||
|
||||
Instances of this class are created by the main IPython object and should
|
||||
be accessed there.
|
||||
"""
|
||||
|
||||
def __init__(self, shell=None, *args, **kwargs):
|
||||
self.shell = shell
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def _validate_data(self, data, metadata=None):
|
||||
"""Validate the display data.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
data : dict
|
||||
The format data dictionary.
|
||||
metadata : dict
|
||||
Any metadata for the data.
|
||||
"""
|
||||
|
||||
if not isinstance(data, dict):
|
||||
raise TypeError('data must be a dict, got: %r' % data)
|
||||
if metadata is not None:
|
||||
if not isinstance(metadata, dict):
|
||||
raise TypeError('metadata must be a dict, got: %r' % metadata)
|
||||
|
||||
# use * to indicate transient, update are keyword-only
|
||||
def publish(self, data, metadata=None, source=None, *, transient=None, update=False, **kwargs) -> None:
|
||||
"""Publish data and metadata to all frontends.
|
||||
|
||||
See the ``display_data`` message in the messaging documentation for
|
||||
more details about this message type.
|
||||
|
||||
The following MIME types are currently implemented:
|
||||
|
||||
* text/plain
|
||||
* text/html
|
||||
* text/markdown
|
||||
* text/latex
|
||||
* application/json
|
||||
* application/javascript
|
||||
* image/png
|
||||
* image/jpeg
|
||||
* image/svg+xml
|
||||
|
||||
Parameters
|
||||
----------
|
||||
data : dict
|
||||
A dictionary having keys that are valid MIME types (like
|
||||
'text/plain' or 'image/svg+xml') and values that are the data for
|
||||
that MIME type. The data itself must be a JSON'able data
|
||||
structure. Minimally all data should have the 'text/plain' data,
|
||||
which can be displayed by all frontends. If more than the plain
|
||||
text is given, it is up to the frontend to decide which
|
||||
representation to use.
|
||||
metadata : dict
|
||||
A dictionary for metadata related to the data. This can contain
|
||||
arbitrary key, value pairs that frontends can use to interpret
|
||||
the data. Metadata specific to each mime-type can be specified
|
||||
in the metadata dict with the same mime-type keys as
|
||||
the data itself.
|
||||
source : str, deprecated
|
||||
Unused.
|
||||
transient : dict, keyword-only
|
||||
A dictionary for transient data.
|
||||
Data in this dictionary should not be persisted as part of saving this output.
|
||||
Examples include 'display_id'.
|
||||
update : bool, keyword-only, default: False
|
||||
If True, only update existing outputs with the same display_id,
|
||||
rather than creating a new output.
|
||||
"""
|
||||
|
||||
handlers: t.Dict = {}
|
||||
if self.shell is not None:
|
||||
handlers = getattr(self.shell, "mime_renderers", {})
|
||||
|
||||
for mime, handler in handlers.items():
|
||||
if mime in data:
|
||||
handler(data[mime], metadata.get(mime, None))
|
||||
return
|
||||
|
||||
if 'text/plain' in data:
|
||||
print(data['text/plain'])
|
||||
|
||||
def clear_output(self, wait=False):
|
||||
"""Clear the output of the cell receiving output."""
|
||||
print('\033[2K\r', end='')
|
||||
sys.stdout.flush()
|
||||
print('\033[2K\r', end='')
|
||||
sys.stderr.flush()
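# Hypothetical sketch: publishing a hand-built mime bundle through the
# machinery above via publish_display_data (imported near the top of this
# file). The bundle contents and the metadata key shown are illustrative.
def _demo_publish():
    publish_display_data(
        data={
            "text/plain": "fallback text",
            "text/html": "<b>rich text</b>",
        },
        metadata={"text/html": {"isolated": False}},
    )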
|
||||
|
||||
|
||||
class CapturingDisplayPublisher(DisplayPublisher):
|
||||
"""A DisplayPublisher that stores"""
|
||||
|
||||
outputs: List = List()
|
||||
|
||||
def publish(
|
||||
self, data, metadata=None, source=None, *, transient=None, update=False
|
||||
):
|
||||
self.outputs.append(
|
||||
{
|
||||
"data": data,
|
||||
"metadata": metadata,
|
||||
"transient": transient,
|
||||
"update": update,
|
||||
}
|
||||
)
|
||||
|
||||
def clear_output(self, wait=False):
|
||||
super(CapturingDisplayPublisher, self).clear_output(wait)
|
||||
|
||||
# empty the list, *do not* reassign a new list
|
||||
self.outputs.clear()
|
60
.venv/lib/python3.12/site-packages/IPython/core/error.py
Normal file
@ -0,0 +1,60 @@
|
||||
# encoding: utf-8
|
||||
"""
|
||||
Global exception classes for IPython.core.
|
||||
|
||||
Authors:
|
||||
|
||||
* Brian Granger
|
||||
* Fernando Perez
|
||||
* Min Ragan-Kelley
|
||||
|
||||
Notes
|
||||
-----
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2008 The IPython Development Team
|
||||
#
|
||||
# Distributed under the terms of the BSD License. The full license is in
|
||||
# the file COPYING, distributed as part of this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Exception classes
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class IPythonCoreError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TryNext(IPythonCoreError):
|
||||
"""Try next hook exception.
|
||||
|
||||
Raise this in your hook function to indicate that the next hook handler
|
||||
should be used to handle the operation.
|
||||
"""
|
||||
|
||||
class UsageError(IPythonCoreError):
|
||||
"""Error in magic function arguments, etc.
|
||||
|
||||
Something that probably won't warrant a full traceback, but should
|
||||
nevertheless interrupt a macro / batch file.
|
||||
"""
|
||||
|
||||
class StdinNotImplementedError(IPythonCoreError, NotImplementedError):
|
||||
"""raw_input was requested in a context where it is not supported
|
||||
|
||||
For use in IPython kernels, where only some frontends may support
|
||||
stdin requests.
|
||||
"""
|
||||
|
||||
class InputRejected(Exception):
|
||||
"""Input rejected by ast transformer.
|
||||
|
||||
Raise this in your NodeTransformer to indicate that InteractiveShell should
|
||||
not execute the supplied input.
|
||||
"""
|
158
.venv/lib/python3.12/site-packages/IPython/core/events.py
Normal file
@ -0,0 +1,158 @@
|
||||
"""Infrastructure for registering and firing callbacks on application events.
|
||||
|
||||
Unlike :mod:`IPython.core.hooks`, which lets end users set single functions to
|
||||
be called at specific times, or a collection of alternative methods to try,
|
||||
callbacks are designed to be used by extension authors. A number of callbacks
|
||||
can be registered for the same event without needing to be aware of one another.
|
||||
|
||||
The functions defined in this module are no-ops indicating the names of available
|
||||
events and the arguments which will be passed to them.
|
||||
|
||||
.. note::
|
||||
|
||||
This API is experimental in IPython 2.0, and may be revised in future versions.
|
||||
"""
|
||||
|
||||
|
||||
class EventManager(object):
|
||||
"""Manage a collection of events and a sequence of callbacks for each.
|
||||
|
||||
This is attached to :class:`~IPython.core.interactiveshell.InteractiveShell`
|
||||
instances as an ``events`` attribute.
|
||||
|
||||
.. note::
|
||||
|
||||
This API is experimental in IPython 2.0, and may be revised in future versions.
|
||||
"""
|
||||
|
||||
def __init__(self, shell, available_events, print_on_error=True):
|
||||
"""Initialise the :class:`CallbackManager`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
shell
|
||||
The :class:`~IPython.core.interactiveshell.InteractiveShell` instance
|
||||
available_events
|
||||
An iterable of names for callback events.
|
||||
print_on_error:
|
||||
A boolean flag controlling whether the EventManager prints a warning when a callback raises an error.
|
||||
"""
|
||||
self.shell = shell
|
||||
self.callbacks = {n:[] for n in available_events}
|
||||
self.print_on_error = print_on_error
|
||||
|
||||
def register(self, event, function):
|
||||
"""Register a new event callback.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
event : str
|
||||
The event for which to register this callback.
|
||||
function : callable
|
||||
A function to be called on the given event. It should take the same
|
||||
parameters as the appropriate callback prototype.
|
||||
|
||||
Raises
|
||||
------
|
||||
TypeError
|
||||
If ``function`` is not callable.
|
||||
KeyError
|
||||
If ``event`` is not one of the known events.
|
||||
"""
|
||||
if not callable(function):
|
||||
raise TypeError('Need a callable, got %r' % function)
|
||||
if function not in self.callbacks[event]:
|
||||
self.callbacks[event].append(function)
|
||||
|
||||
def unregister(self, event, function):
|
||||
"""Remove a callback from the given event."""
|
||||
if function in self.callbacks[event]:
|
||||
return self.callbacks[event].remove(function)
|
||||
|
||||
raise ValueError('Function {!r} is not registered as a {} callback'.format(function, event))
|
||||
|
||||
def trigger(self, event, *args, **kwargs):
|
||||
"""Call callbacks for ``event``.
|
||||
|
||||
Any additional arguments are passed to all callbacks registered for this
|
||||
event. Exceptions raised by callbacks are caught, and a message printed.
|
||||
"""
|
||||
for func in self.callbacks[event][:]:
|
||||
try:
|
||||
func(*args, **kwargs)
|
||||
except (Exception, KeyboardInterrupt):
|
||||
if self.print_on_error:
|
||||
print(
|
||||
"Error in callback {} (for {}), with arguments args {},kwargs {}:".format(
|
||||
func, event, args, kwargs
|
||||
)
|
||||
)
|
||||
self.shell.showtraceback()
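# Hypothetical sketch of the typical extension-author usage of the callback
# machinery above; `ip` is the running InteractiveShell instance.
def _demo_register_callbacks(ip):
    def pre_run_cell(info):
        # `info` is an ExecutionInfo object (see the prototypes below)
        print("about to run:", info.raw_cell[:40])

    def post_run_cell(result):
        # `result` is an ExecutionResult object
        print("finished, success =", result.success)

    ip.events.register("pre_run_cell", pre_run_cell)
    ip.events.register("post_run_cell", post_run_cell)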
|
||||
|
||||
# event_name -> prototype mapping
|
||||
available_events = {}
|
||||
|
||||
def _define_event(callback_function):
|
||||
available_events[callback_function.__name__] = callback_function
|
||||
return callback_function
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Callback prototypes
|
||||
#
|
||||
# No-op functions which describe the names of available events and the
|
||||
# signatures of callbacks for those events.
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
@_define_event
|
||||
def pre_execute():
|
||||
"""Fires before code is executed in response to user/frontend action.
|
||||
|
||||
This includes comm and widget messages and silent execution, as well as user
|
||||
code cells.
|
||||
"""
|
||||
pass
|
||||
|
||||
@_define_event
|
||||
def pre_run_cell(info):
|
||||
"""Fires before user-entered code runs.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
info : :class:`~IPython.core.interactiveshell.ExecutionInfo`
|
||||
An object containing information used for the code execution.
|
||||
"""
|
||||
pass
|
||||
|
||||
@_define_event
|
||||
def post_execute():
|
||||
"""Fires after code is executed in response to user/frontend action.
|
||||
|
||||
This includes comm and widget messages and silent execution, as well as user
|
||||
code cells.
|
||||
"""
|
||||
pass
|
||||
|
||||
@_define_event
|
||||
def post_run_cell(result):
|
||||
"""Fires after user-entered code runs.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
result : :class:`~IPython.core.interactiveshell.ExecutionResult`
|
||||
The object which will be returned as the execution result.
|
||||
"""
|
||||
pass
|
||||
|
||||
@_define_event
|
||||
def shell_initialized(ip):
|
||||
"""Fires after initialisation of :class:`~IPython.core.interactiveshell.InteractiveShell`.
|
||||
|
||||
This is before extensions and startup scripts are loaded, so it can only be
|
||||
set by subclassing.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
ip : :class:`~IPython.core.interactiveshell.InteractiveShell`
|
||||
The newly initialised shell.
|
||||
"""
|
||||
pass
|
192
.venv/lib/python3.12/site-packages/IPython/core/excolors.py
Normal file
@ -0,0 +1,192 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Color schemes for exception handling code in IPython.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
#*****************************************************************************
|
||||
# Copyright (C) 2005-2006 Fernando Perez <fperez@colorado.edu>
|
||||
#
|
||||
# Distributed under the terms of the BSD License. The full license is in
|
||||
# the file COPYING, distributed as part of this software.
|
||||
#*****************************************************************************
|
||||
|
||||
from IPython.utils.coloransi import ColorSchemeTable, TermColors, ColorScheme
|
||||
|
||||
def exception_colors():
|
||||
"""Return a color table with fields for exception reporting.
|
||||
|
||||
The table is an instance of ColorSchemeTable with schemes added for
|
||||
'Neutral', 'Linux', 'LightBG' and 'NoColor', and fields for exception handling filled
|
||||
in.
|
||||
|
||||
Examples:
|
||||
|
||||
>>> ec = exception_colors()
|
||||
>>> ec.active_scheme_name
|
||||
''
|
||||
>>> print(ec.active_colors)
|
||||
None
|
||||
|
||||
Now we activate a color scheme:
|
||||
>>> ec.set_active_scheme('NoColor')
|
||||
>>> ec.active_scheme_name
|
||||
'NoColor'
|
||||
>>> sorted(ec.active_colors.keys())
|
||||
['Normal', 'breakpoint_disabled', 'breakpoint_enabled', 'caret', 'em',
|
||||
'excName', 'filename', 'filenameEm', 'line', 'lineno', 'linenoEm', 'name',
|
||||
'nameEm', 'normalEm', 'prompt', 'topline', 'vName', 'val', 'valEm']
|
||||
|
||||
"""
|
||||
|
||||
ex_colors = ColorSchemeTable()
|
||||
|
||||
# Populate it with color schemes
|
||||
C = TermColors # shorthand and local lookup
|
||||
ex_colors.add_scheme(
|
||||
ColorScheme(
|
||||
"NoColor",
|
||||
{
|
||||
# The color to be used for the top line
|
||||
"topline": C.NoColor,
|
||||
|
||||
# The colors to be used in the traceback
|
||||
"filename": C.NoColor,
|
||||
"lineno": C.NoColor,
|
||||
"name": C.NoColor,
|
||||
"vName": C.NoColor,
|
||||
"val": C.NoColor,
|
||||
"em": C.NoColor,
|
||||
|
||||
# Emphasized colors for the last frame of the traceback
|
||||
"normalEm": C.NoColor,
|
||||
"filenameEm": C.NoColor,
|
||||
"linenoEm": C.NoColor,
|
||||
"nameEm": C.NoColor,
|
||||
"valEm": C.NoColor,
|
||||
|
||||
# Colors for printing the exception
|
||||
"excName": C.NoColor,
|
||||
"line": C.NoColor,
|
||||
"caret": C.NoColor,
|
||||
"Normal": C.NoColor,
|
||||
# debugger
|
||||
"prompt": C.NoColor,
|
||||
"breakpoint_enabled": C.NoColor,
|
||||
"breakpoint_disabled": C.NoColor,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
# make some schemes as instances so we can copy them for modification easily
|
||||
ex_colors.add_scheme(
|
||||
ColorScheme(
|
||||
"Linux",
|
||||
{
|
||||
# The color to be used for the top line
|
||||
"topline": C.LightRed,
|
||||
# The colors to be used in the traceback
|
||||
"filename": C.Green,
|
||||
"lineno": C.Green,
|
||||
"name": C.Purple,
|
||||
"vName": C.Cyan,
|
||||
"val": C.Green,
|
||||
"em": C.LightCyan,
|
||||
# Emphasized colors for the last frame of the traceback
|
||||
"normalEm": C.LightCyan,
|
||||
"filenameEm": C.LightGreen,
|
||||
"linenoEm": C.LightGreen,
|
||||
"nameEm": C.LightPurple,
|
||||
"valEm": C.LightBlue,
|
||||
# Colors for printing the exception
|
||||
"excName": C.LightRed,
|
||||
"line": C.Yellow,
|
||||
"caret": C.White,
|
||||
"Normal": C.Normal,
|
||||
# debugger
|
||||
"prompt": C.Green,
|
||||
"breakpoint_enabled": C.LightRed,
|
||||
"breakpoint_disabled": C.Red,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
# For light backgrounds, swap dark/light colors
|
||||
ex_colors.add_scheme(
|
||||
ColorScheme(
|
||||
"LightBG",
|
||||
{
|
||||
# The color to be used for the top line
|
||||
"topline": C.Red,
|
||||
|
||||
# The colors to be used in the traceback
|
||||
"filename": C.LightGreen,
|
||||
"lineno": C.LightGreen,
|
||||
"name": C.LightPurple,
|
||||
"vName": C.Cyan,
|
||||
"val": C.LightGreen,
|
||||
"em": C.Cyan,
|
||||
|
||||
# Emphasized colors for the last frame of the traceback
|
||||
"normalEm": C.Cyan,
|
||||
"filenameEm": C.Green,
|
||||
"linenoEm": C.Green,
|
||||
"nameEm": C.Purple,
|
||||
"valEm": C.Blue,
|
||||
|
||||
# Colors for printing the exception
|
||||
"excName": C.Red,
|
||||
# "line": C.Brown, # brown often is displayed as yellow
|
||||
"line": C.Red,
|
||||
"caret": C.Normal,
|
||||
"Normal": C.Normal,
|
||||
# debugger
|
||||
"prompt": C.Blue,
|
||||
"breakpoint_enabled": C.LightRed,
|
||||
"breakpoint_disabled": C.Red,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
ex_colors.add_scheme(
|
||||
ColorScheme(
|
||||
"Neutral",
|
||||
{
|
||||
# The color to be used for the top line
|
||||
"topline": C.Red,
|
||||
# The colors to be used in the traceback
|
||||
"filename": C.LightGreen,
|
||||
"lineno": C.LightGreen,
|
||||
"name": C.LightPurple,
|
||||
"vName": C.Cyan,
|
||||
"val": C.LightGreen,
|
||||
"em": C.Cyan,
|
||||
# Emphasized colors for the last frame of the traceback
|
||||
"normalEm": C.Cyan,
|
||||
"filenameEm": C.Green,
|
||||
"linenoEm": C.Green,
|
||||
"nameEm": C.Purple,
|
||||
"valEm": C.Blue,
|
||||
# Colors for printing the exception
|
||||
"excName": C.Red,
|
||||
# line = C.Brown, # brown often is displayed as yellow
|
||||
"line": C.Red,
|
||||
"caret": C.Normal,
|
||||
"Normal": C.Normal,
|
||||
# debugger
|
||||
"prompt": C.Blue,
|
||||
"breakpoint_enabled": C.LightRed,
|
||||
"breakpoint_disabled": C.Red,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
# Hack: the 'neutral' colours are not very visible on a dark background on
|
||||
# Windows. Since Windows command prompts have a dark background by default, and
|
||||
# relatively few users are likely to alter that, we will use the 'Linux' colours,
|
||||
# designed for a dark background, as the default on Windows.
|
||||
if os.name == "nt":
|
||||
ex_colors.add_scheme(ex_colors['Linux'].copy('Neutral'))
|
||||
|
||||
return ex_colors
|
135
.venv/lib/python3.12/site-packages/IPython/core/extensions.py
Normal file
@ -0,0 +1,135 @@
|
||||
# encoding: utf-8
|
||||
"""A class for managing IPython extensions."""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
from importlib import import_module, reload
|
||||
|
||||
from traitlets.config.configurable import Configurable
|
||||
from IPython.utils.path import ensure_dir_exists
|
||||
from traitlets import Instance
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Main class
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
BUILTINS_EXTS = {"storemagic": False, "autoreload": False}
|
||||
|
||||
|
||||
class ExtensionManager(Configurable):
|
||||
"""A class to manage IPython extensions.
|
||||
|
||||
An IPython extension is an importable Python module that has
|
||||
a function with the signature::
|
||||
|
||||
def load_ipython_extension(ipython):
|
||||
# Do things with ipython
|
||||
|
||||
This function is called after your extension is imported and the
|
||||
currently active :class:`InteractiveShell` instance is passed as
|
||||
the only argument. You can do anything you want with IPython at
|
||||
that point, including defining new magic and aliases, adding new
|
||||
components, etc.
|
||||
|
||||
You can also optionally define an :func:`unload_ipython_extension(ipython)`
|
||||
function, which will be called if the user unloads or reloads the extension.
|
||||
The extension manager will only call :func:`load_ipython_extension` again
|
||||
if the extension is reloaded.
|
||||
|
||||
You can put your extension modules anywhere you want, as long as
|
||||
they can be imported by Python's standard import mechanism.
|
||||
"""
|
||||
|
||||
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
|
||||
|
||||
def __init__(self, shell=None, **kwargs):
|
||||
super(ExtensionManager, self).__init__(shell=shell, **kwargs)
|
||||
self.loaded = set()
|
||||
|
||||
def load_extension(self, module_str: str):
|
||||
"""Load an IPython extension by its module name.
|
||||
|
||||
Returns the string "already loaded" if the extension is already loaded,
|
||||
"no load function" if the module doesn't have a load_ipython_extension
|
||||
function, or None if it succeeded.
|
||||
"""
|
||||
try:
|
||||
return self._load_extension(module_str)
|
||||
except ModuleNotFoundError:
|
||||
if module_str in BUILTINS_EXTS:
|
||||
BUILTINS_EXTS[module_str] = True
|
||||
return self._load_extension("IPython.extensions." + module_str)
|
||||
raise
|
||||
|
||||
def _load_extension(self, module_str: str):
|
||||
if module_str in self.loaded:
|
||||
return "already loaded"
|
||||
|
||||
assert self.shell is not None
|
||||
|
||||
with self.shell.builtin_trap:
|
||||
if module_str not in sys.modules:
|
||||
mod = import_module(module_str)
|
||||
mod = sys.modules[module_str]
|
||||
if self._call_load_ipython_extension(mod):
|
||||
self.loaded.add(module_str)
|
||||
else:
|
||||
return "no load function"
|
||||
|
||||
def unload_extension(self, module_str: str):
|
||||
"""Unload an IPython extension by its module name.
|
||||
|
||||
This function looks up the extension's name in ``sys.modules`` and
|
||||
simply calls ``mod.unload_ipython_extension(self)``.
|
||||
|
||||
Returns the string "no unload function" if the extension doesn't define
|
||||
a function to unload itself, "not loaded" if the extension isn't loaded,
|
||||
otherwise None.
|
||||
"""
|
||||
if BUILTINS_EXTS.get(module_str, False) is True:
|
||||
module_str = "IPython.extensions." + module_str
|
||||
if module_str not in self.loaded:
|
||||
return "not loaded"
|
||||
|
||||
if module_str in sys.modules:
|
||||
mod = sys.modules[module_str]
|
||||
if self._call_unload_ipython_extension(mod):
|
||||
self.loaded.discard(module_str)
|
||||
else:
|
||||
return "no unload function"
|
||||
|
||||
def reload_extension(self, module_str: str):
|
||||
"""Reload an IPython extension by calling reload.
|
||||
|
||||
If the module has not been loaded before,
|
||||
:meth:`InteractiveShell.load_extension` is called. Otherwise
|
||||
:func:`reload` is called and then the :func:`load_ipython_extension`
|
||||
function of the module, if it exists, is called.
|
||||
"""
|
||||
|
||||
if BUILTINS_EXTS.get(module_str, False) is True:
|
||||
module_str = "IPython.extensions." + module_str
|
||||
|
||||
if (module_str in self.loaded) and (module_str in sys.modules):
|
||||
self.unload_extension(module_str)
|
||||
mod = sys.modules[module_str]
|
||||
reload(mod)
|
||||
if self._call_load_ipython_extension(mod):
|
||||
self.loaded.add(module_str)
|
||||
else:
|
||||
self.load_extension(module_str)
|
||||
|
||||
def _call_load_ipython_extension(self, mod):
|
||||
if hasattr(mod, 'load_ipython_extension'):
|
||||
mod.load_ipython_extension(self.shell)
|
||||
return True
|
||||
|
||||
def _call_unload_ipython_extension(self, mod):
|
||||
if hasattr(mod, 'unload_ipython_extension'):
|
||||
mod.unload_ipython_extension(self.shell)
|
||||
return True
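# Hypothetical sketch of a minimal extension module usable with the manager
# above. Saved as e.g. ``my_extension.py`` somewhere importable, it can be
# loaded with ``%load_ext my_extension`` (names are illustrative):
#
#     def load_ipython_extension(ipython):
#         ipython.push({"answer": 42})          # inject into the user namespace
#
#     def unload_ipython_extension(ipython):
#         ipython.user_ns.pop("answer", None)   # clean up on unload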
|
1090
.venv/lib/python3.12/site-packages/IPython/core/formatters.py
Normal file
File diff suppressed because it is too large
@ -0,0 +1,24 @@
|
||||
# encoding: utf-8
|
||||
"""Simple function to call to get the current InteractiveShell instance
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (C) 2013 The IPython Development Team
|
||||
#
|
||||
# Distributed under the terms of the BSD License. The full license is in
|
||||
# the file COPYING, distributed as part of this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Classes and functions
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def get_ipython():
|
||||
"""Get the global InteractiveShell instance.
|
||||
|
||||
Returns None if no InteractiveShell instance is registered.
|
||||
"""
|
||||
from IPython.core.interactiveshell import InteractiveShell
|
||||
if InteractiveShell.initialized():
|
||||
return InteractiveShell.instance()
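# Sketch: because get_ipython() returns None outside IPython, library code can
# cheaply detect whether it is running under IPython (helper name is
# hypothetical).
def _demo_running_under_ipython():
    return get_ipython() is not None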
|
895
.venv/lib/python3.12/site-packages/IPython/core/guarded_eval.py
Normal file
@ -0,0 +1,895 @@
|
||||
from inspect import isclass, signature, Signature
|
||||
from typing import (
|
||||
Annotated,
|
||||
AnyStr,
|
||||
Callable,
|
||||
Dict,
|
||||
Literal,
|
||||
NamedTuple,
|
||||
NewType,
|
||||
Optional,
|
||||
Protocol,
|
||||
Set,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeGuard,
|
||||
Union,
|
||||
get_args,
|
||||
get_origin,
|
||||
is_typeddict,
|
||||
)
|
||||
import ast
|
||||
import builtins
|
||||
import collections
|
||||
import operator
|
||||
import sys
|
||||
from functools import cached_property
|
||||
from dataclasses import dataclass, field
|
||||
from types import MethodDescriptorType, ModuleType
|
||||
|
||||
from IPython.utils.decorators import undoc
|
||||
|
||||
|
||||
if sys.version_info < (3, 11):
|
||||
from typing_extensions import Self, LiteralString
|
||||
else:
|
||||
from typing import Self, LiteralString
|
||||
|
||||
if sys.version_info < (3, 12):
|
||||
from typing_extensions import TypeAliasType
|
||||
else:
|
||||
from typing import TypeAliasType
|
||||
|
||||
|
||||
@undoc
|
||||
class HasGetItem(Protocol):
|
||||
def __getitem__(self, key) -> None: ...
|
||||
|
||||
|
||||
@undoc
|
||||
class InstancesHaveGetItem(Protocol):
|
||||
def __call__(self, *args, **kwargs) -> HasGetItem: ...
|
||||
|
||||
|
||||
@undoc
|
||||
class HasGetAttr(Protocol):
|
||||
def __getattr__(self, key) -> None: ...
|
||||
|
||||
|
||||
@undoc
|
||||
class DoesNotHaveGetAttr(Protocol):
|
||||
pass
|
||||
|
||||
|
||||
# By default `__getattr__` is not explicitly implemented on most objects
|
||||
MayHaveGetattr = Union[HasGetAttr, DoesNotHaveGetAttr]
|
||||
|
||||
|
||||
def _unbind_method(func: Callable) -> Union[Callable, None]:
|
||||
"""Get unbound method for given bound method.
|
||||
|
||||
Returns None if cannot get unbound method, or method is already unbound.
|
||||
"""
|
||||
owner = getattr(func, "__self__", None)
|
||||
owner_class = type(owner)
|
||||
name = getattr(func, "__name__", None)
|
||||
instance_dict_overrides = getattr(owner, "__dict__", None)
|
||||
if (
|
||||
owner is not None
|
||||
and name
|
||||
and (
|
||||
not instance_dict_overrides
|
||||
or (instance_dict_overrides and name not in instance_dict_overrides)
|
||||
)
|
||||
):
|
||||
return getattr(owner_class, name)
|
||||
return None
|
||||
|
||||
|
||||
@undoc
|
||||
@dataclass
|
||||
class EvaluationPolicy:
|
||||
"""Definition of evaluation policy."""
|
||||
|
||||
allow_locals_access: bool = False
|
||||
allow_globals_access: bool = False
|
||||
allow_item_access: bool = False
|
||||
allow_attr_access: bool = False
|
||||
allow_builtins_access: bool = False
|
||||
allow_all_operations: bool = False
|
||||
allow_any_calls: bool = False
|
||||
allowed_calls: Set[Callable] = field(default_factory=set)
|
||||
|
||||
def can_get_item(self, value, item):
|
||||
return self.allow_item_access
|
||||
|
||||
def can_get_attr(self, value, attr):
|
||||
return self.allow_attr_access
|
||||
|
||||
def can_operate(self, dunders: Tuple[str, ...], a, b=None):
|
||||
if self.allow_all_operations:
|
||||
return True
|
||||
|
||||
def can_call(self, func):
|
||||
if self.allow_any_calls:
|
||||
return True
|
||||
|
||||
if func in self.allowed_calls:
|
||||
return True
|
||||
|
||||
owner_method = _unbind_method(func)
|
||||
|
||||
if owner_method and owner_method in self.allowed_calls:
|
||||
return True
|
||||
|
||||
|
||||
def _get_external(module_name: str, access_path: Sequence[str]):
|
||||
"""Get value from external module given a dotted access path.
|
||||
|
||||
Raises:
|
||||
* `KeyError` if the module is removed or not found, and
|
||||
* `AttributeError` if access path does not match an exported object
|
||||
"""
|
||||
member_type = sys.modules[module_name]
|
||||
for attr in access_path:
|
||||
member_type = getattr(member_type, attr)
|
||||
return member_type
|
||||
|
||||
|
||||
def _has_original_dunder_external(
|
||||
value,
|
||||
module_name: str,
|
||||
access_path: Sequence[str],
|
||||
method_name: str,
|
||||
):
|
||||
if module_name not in sys.modules:
|
||||
# LBYL (look before you leap) as it is faster
|
||||
return False
|
||||
try:
|
||||
member_type = _get_external(module_name, access_path)
|
||||
value_type = type(value)
|
||||
if type(value) == member_type:
|
||||
return True
|
||||
if method_name == "__getattribute__":
|
||||
# we have to short-circuit here due to an unresolved issue in
|
||||
# `isinstance` implementation: https://bugs.python.org/issue32683
|
||||
return False
|
||||
if isinstance(value, member_type):
|
||||
method = getattr(value_type, method_name, None)
|
||||
member_method = getattr(member_type, method_name, None)
|
||||
if member_method == method:
|
||||
return True
|
||||
except (AttributeError, KeyError):
|
||||
return False
|
||||
|
||||
|
||||
def _has_original_dunder(
|
||||
value, allowed_types, allowed_methods, allowed_external, method_name
|
||||
):
|
||||
# note: Python ignores `__getattr__`/`__getitem__` on instances,
|
||||
# we only need to check at class level
|
||||
value_type = type(value)
|
||||
|
||||
# strict type check passes → no need to check method
|
||||
if value_type in allowed_types:
|
||||
return True
|
||||
|
||||
method = getattr(value_type, method_name, None)
|
||||
|
||||
if method is None:
|
||||
return None
|
||||
|
||||
if method in allowed_methods:
|
||||
return True
|
||||
|
||||
for module_name, *access_path in allowed_external:
|
||||
if _has_original_dunder_external(value, module_name, access_path, method_name):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
@undoc
|
||||
@dataclass
|
||||
class SelectivePolicy(EvaluationPolicy):
|
||||
allowed_getitem: Set[InstancesHaveGetItem] = field(default_factory=set)
|
||||
allowed_getitem_external: Set[Tuple[str, ...]] = field(default_factory=set)
|
||||
|
||||
allowed_getattr: Set[MayHaveGetattr] = field(default_factory=set)
|
||||
allowed_getattr_external: Set[Tuple[str, ...]] = field(default_factory=set)
|
||||
|
||||
allowed_operations: Set = field(default_factory=set)
|
||||
allowed_operations_external: Set[Tuple[str, ...]] = field(default_factory=set)
|
||||
|
||||
_operation_methods_cache: Dict[str, Set[Callable]] = field(
|
||||
default_factory=dict, init=False
|
||||
)
|
||||
|
||||
def can_get_attr(self, value, attr):
|
||||
has_original_attribute = _has_original_dunder(
|
||||
value,
|
||||
allowed_types=self.allowed_getattr,
|
||||
allowed_methods=self._getattribute_methods,
|
||||
allowed_external=self.allowed_getattr_external,
|
||||
method_name="__getattribute__",
|
||||
)
|
||||
has_original_attr = _has_original_dunder(
|
||||
value,
|
||||
allowed_types=self.allowed_getattr,
|
||||
allowed_methods=self._getattr_methods,
|
||||
allowed_external=self.allowed_getattr_external,
|
||||
method_name="__getattr__",
|
||||
)
|
||||
|
||||
accept = False
|
||||
|
||||
# Many objects do not have `__getattr__`, this is fine.
|
||||
if has_original_attr is None and has_original_attribute:
|
||||
accept = True
|
||||
else:
|
||||
# Accept objects without modifications to `__getattr__` and `__getattribute__`
|
||||
accept = has_original_attr and has_original_attribute
|
||||
|
||||
if accept:
|
||||
# We still need to check for overridden properties.
|
||||
|
||||
value_class = type(value)
|
||||
if not hasattr(value_class, attr):
|
||||
return True
|
||||
|
||||
class_attr_val = getattr(value_class, attr)
|
||||
is_property = isinstance(class_attr_val, property)
|
||||
|
||||
if not is_property:
|
||||
return True
|
||||
|
||||
# Properties in allowed types are ok (although we do not include any
|
||||
# properties in our default allow list currently).
|
||||
if type(value) in self.allowed_getattr:
|
||||
return True # pragma: no cover
|
||||
|
||||
# Properties in subclasses of allowed types may be ok if not changed
|
||||
for module_name, *access_path in self.allowed_getattr_external:
|
||||
try:
|
||||
external_class = _get_external(module_name, access_path)
|
||||
external_class_attr_val = getattr(external_class, attr)
|
||||
except (KeyError, AttributeError):
|
||||
return False # pragma: no cover
|
||||
return class_attr_val == external_class_attr_val
|
||||
|
||||
return False
|
||||
|
||||
def can_get_item(self, value, item):
|
||||
"""Allow accessing `__getiitem__` of allow-listed instances unless it was not modified."""
|
||||
return _has_original_dunder(
|
||||
value,
|
||||
allowed_types=self.allowed_getitem,
|
||||
allowed_methods=self._getitem_methods,
|
||||
allowed_external=self.allowed_getitem_external,
|
||||
method_name="__getitem__",
|
||||
)
|
||||
|
||||
def can_operate(self, dunders: Tuple[str, ...], a, b=None):
|
||||
objects = [a]
|
||||
if b is not None:
|
||||
objects.append(b)
|
||||
return all(
|
||||
[
|
||||
_has_original_dunder(
|
||||
obj,
|
||||
allowed_types=self.allowed_operations,
|
||||
allowed_methods=self._operator_dunder_methods(dunder),
|
||||
allowed_external=self.allowed_operations_external,
|
||||
method_name=dunder,
|
||||
)
|
||||
for dunder in dunders
|
||||
for obj in objects
|
||||
]
|
||||
)
|
||||
|
||||
def _operator_dunder_methods(self, dunder: str) -> Set[Callable]:
|
||||
if dunder not in self._operation_methods_cache:
|
||||
self._operation_methods_cache[dunder] = self._safe_get_methods(
|
||||
self.allowed_operations, dunder
|
||||
)
|
||||
return self._operation_methods_cache[dunder]
|
||||
|
||||
@cached_property
|
||||
def _getitem_methods(self) -> Set[Callable]:
|
||||
return self._safe_get_methods(self.allowed_getitem, "__getitem__")
|
||||
|
||||
@cached_property
|
||||
def _getattr_methods(self) -> Set[Callable]:
|
||||
return self._safe_get_methods(self.allowed_getattr, "__getattr__")
|
||||
|
||||
@cached_property
|
||||
def _getattribute_methods(self) -> Set[Callable]:
|
||||
return self._safe_get_methods(self.allowed_getattr, "__getattribute__")
|
||||
|
||||
def _safe_get_methods(self, classes, name) -> Set[Callable]:
|
||||
return {
|
||||
method
|
||||
for class_ in classes
|
||||
for method in [getattr(class_, name, None)]
|
||||
if method
|
||||
}
|
||||
|
||||
|
||||
class _DummyNamedTuple(NamedTuple):
|
||||
"""Used internally to retrieve methods of named tuple instance."""
|
||||
|
||||
|
||||
class EvaluationContext(NamedTuple):
|
||||
#: Local namespace
|
||||
locals: dict
|
||||
#: Global namespace
|
||||
globals: dict
|
||||
#: Evaluation policy identifier
|
||||
evaluation: Literal["forbidden", "minimal", "limited", "unsafe", "dangerous"] = (
|
||||
"forbidden"
|
||||
)
|
||||
#: Whether the evaluation of code takes place inside of a subscript.
|
||||
#: Useful for evaluating ``:-1, 'col'`` in ``df[:-1, 'col']``.
|
||||
in_subscript: bool = False
|
||||
|
||||
|
||||
class _IdentitySubscript:
|
||||
"""Returns the key itself when item is requested via subscript."""
|
||||
|
||||
def __getitem__(self, key):
|
||||
return key
|
||||
|
||||
|
||||
IDENTITY_SUBSCRIPT = _IdentitySubscript()
|
||||
SUBSCRIPT_MARKER = "__SUBSCRIPT_SENTINEL__"
|
||||
UNKNOWN_SIGNATURE = Signature()
|
||||
NOT_EVALUATED = object()
|
||||
|
||||
|
||||
class GuardRejection(Exception):
|
||||
"""Exception raised when guard rejects evaluation attempt."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def guarded_eval(code: str, context: EvaluationContext):
|
||||
"""Evaluate provided code in the evaluation context.
|
||||
|
||||
If evaluation policy given by context is set to ``forbidden``
|
||||
no evaluation will be performed; if it is set to ``dangerous``
|
||||
standard :func:`eval` will be used; finally, for any other policy,
|
||||
:func:`eval_node` will be called on the parsed AST.
|
||||
"""
|
||||
locals_ = context.locals
|
||||
|
||||
if context.evaluation == "forbidden":
|
||||
raise GuardRejection("Forbidden mode")
|
||||
|
||||
# note: not using `ast.literal_eval` as it does not implement
|
||||
# getitem at all, for example it fails on simple `[0][1]`
|
||||
|
||||
if context.in_subscript:
|
||||
# syntactic sugar for ellipsis (:) is only available in subscripts
|
||||
# so we need to trick the ast parser into thinking that we have
|
||||
# a subscript, but we need to be able to later recognise that we did
|
||||
# it so we can ignore the actual __getitem__ operation
|
||||
if not code:
|
||||
return tuple()
|
||||
locals_ = locals_.copy()
|
||||
locals_[SUBSCRIPT_MARKER] = IDENTITY_SUBSCRIPT
|
||||
code = SUBSCRIPT_MARKER + "[" + code + "]"
|
||||
context = EvaluationContext(**{**context._asdict(), **{"locals": locals_}})
|
||||
|
||||
if context.evaluation == "dangerous":
|
||||
return eval(code, context.globals, context.locals)
|
||||
|
||||
expression = ast.parse(code, mode="eval")
|
||||
|
||||
return eval_node(expression, context)
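A minimal usage sketch (not part of the vendored file), assuming this module is importable as IPython.core.guarded_eval; the names below mirror the API defined in this file:

from IPython.core.guarded_eval import EvaluationContext, GuardRejection, guarded_eval

# Evaluate a harmless expression under the "limited" policy.
context = EvaluationContext(locals={"x": [1, 2, 3]}, globals={}, evaluation="limited")
print(guarded_eval("x[0] + 1", context))  # 2: list.__getitem__ and int.__add__ are allow-listed

# A call with side effects is rejected: list.append is not in the allow-listed calls.
try:
    guarded_eval("x.append(4)", context)
except GuardRejection as exc:
    print("rejected:", exc)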
|
||||
|
||||
|
||||
BINARY_OP_DUNDERS: Dict[Type[ast.operator], Tuple[str]] = {
|
||||
ast.Add: ("__add__",),
|
||||
ast.Sub: ("__sub__",),
|
||||
ast.Mult: ("__mul__",),
|
||||
ast.Div: ("__truediv__",),
|
||||
ast.FloorDiv: ("__floordiv__",),
|
||||
ast.Mod: ("__mod__",),
|
||||
ast.Pow: ("__pow__",),
|
||||
ast.LShift: ("__lshift__",),
|
||||
ast.RShift: ("__rshift__",),
|
||||
ast.BitOr: ("__or__",),
|
||||
ast.BitXor: ("__xor__",),
|
||||
ast.BitAnd: ("__and__",),
|
||||
ast.MatMult: ("__matmul__",),
|
||||
}
|
||||
|
||||
COMP_OP_DUNDERS: Dict[Type[ast.cmpop], Tuple[str, ...]] = {
|
||||
ast.Eq: ("__eq__",),
|
||||
ast.NotEq: ("__ne__", "__eq__"),
|
||||
ast.Lt: ("__lt__", "__gt__"),
|
||||
ast.LtE: ("__le__", "__ge__"),
|
||||
ast.Gt: ("__gt__", "__lt__"),
|
||||
ast.GtE: ("__ge__", "__le__"),
|
||||
ast.In: ("__contains__",),
|
||||
# Note: ast.Is, ast.IsNot, ast.NotIn are handled specially
|
||||
}
|
||||
|
||||
UNARY_OP_DUNDERS: Dict[Type[ast.unaryop], Tuple[str, ...]] = {
|
||||
ast.USub: ("__neg__",),
|
||||
ast.UAdd: ("__pos__",),
|
||||
# we have to check both __inv__ and __invert__!
|
||||
ast.Invert: ("__invert__", "__inv__"),
|
||||
ast.Not: ("__not__",),
|
||||
}
|
||||
|
||||
|
||||
class ImpersonatingDuck:
|
||||
"""A dummy class used to create objects of other classes without calling their ``__init__``"""
|
||||
|
||||
# no-op: override __class__ to impersonate
|
||||
|
||||
|
||||
class _Duck:
|
||||
"""A dummy class used to create objects pretending to have given attributes"""
|
||||
|
||||
def __init__(self, attributes: Optional[dict] = None, items: Optional[dict] = None):
|
||||
self.attributes = attributes or {}
|
||||
self.items = items or {}
|
||||
|
||||
def __getattr__(self, attr: str):
|
||||
return self.attributes[attr]
|
||||
|
||||
def __hasattr__(self, attr: str):
|
||||
return attr in self.attributes
|
||||
|
||||
def __dir__(self):
|
||||
return [*dir(super), *self.attributes]
|
||||
|
||||
def __getitem__(self, key: str):
|
||||
return self.items[key]
|
||||
|
||||
def __hasitem__(self, key: str):
|
||||
return self.items[key]
|
||||
|
||||
def _ipython_key_completions_(self):
|
||||
return self.items.keys()
|
||||
|
||||
|
||||
def _find_dunder(node_op, dunders) -> Union[Tuple[str, ...], None]:
|
||||
dunder = None
|
||||
for op, candidate_dunder in dunders.items():
|
||||
if isinstance(node_op, op):
|
||||
dunder = candidate_dunder
|
||||
return dunder
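A short illustrative sketch (not part of the file) of how the dunder tables above are consulted for an AST operator node:

import ast  # ast is already imported at the top of this module

tree = ast.parse("a <= b", mode="eval")
op = tree.body.ops[0]  # an ast.LtE instance
print(_find_dunder(op, COMP_OP_DUNDERS))  # ('__le__', '__ge__')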
|
||||
|
||||
|
||||
def eval_node(node: Union[ast.AST, None], context: EvaluationContext):
|
||||
"""Evaluate AST node in provided context.
|
||||
|
||||
Applies evaluation restrictions defined in the context. Currently does not support evaluation of functions with keyword arguments.
|
||||
|
||||
Does not evaluate actions that always have side effects:
|
||||
|
||||
- class definitions (``class sth: ...``)
|
||||
- function definitions (``def sth: ...``)
|
||||
- variable assignments (``x = 1``)
|
||||
- augmented assignments (``x += 1``)
|
||||
- deletions (``del x``)
|
||||
|
||||
Does not evaluate operations which do not return values:
|
||||
|
||||
- assertions (``assert x``)
|
||||
- pass (``pass``)
|
||||
- imports (``import x``)
|
||||
- control flow:
|
||||
|
||||
- conditionals (``if x:``) except for ternary IfExp (``a if x else b``)
|
||||
- loops (``for`` and ``while``)
|
||||
- exception handling
|
||||
|
||||
The purpose of this function is to guard against unwanted side-effects;
|
||||
it does not give guarantees on protection from malicious code execution.
|
||||
"""
|
||||
policy = EVALUATION_POLICIES[context.evaluation]
|
||||
if node is None:
|
||||
return None
|
||||
if isinstance(node, ast.Expression):
|
||||
return eval_node(node.body, context)
|
||||
if isinstance(node, ast.BinOp):
|
||||
left = eval_node(node.left, context)
|
||||
right = eval_node(node.right, context)
|
||||
dunders = _find_dunder(node.op, BINARY_OP_DUNDERS)
|
||||
if dunders:
|
||||
if policy.can_operate(dunders, left, right):
|
||||
return getattr(left, dunders[0])(right)
|
||||
else:
|
||||
raise GuardRejection(
|
||||
f"Operation (`{dunders}`) for",
|
||||
type(left),
|
||||
f"not allowed in {context.evaluation} mode",
|
||||
)
|
||||
if isinstance(node, ast.Compare):
|
||||
left = eval_node(node.left, context)
|
||||
all_true = True
|
||||
negate = False
|
||||
for op, right in zip(node.ops, node.comparators):
|
||||
right = eval_node(right, context)
|
||||
dunder = None
|
||||
dunders = _find_dunder(op, COMP_OP_DUNDERS)
|
||||
if not dunders:
|
||||
if isinstance(op, ast.NotIn):
|
||||
dunders = COMP_OP_DUNDERS[ast.In]
|
||||
negate = True
|
||||
if isinstance(op, ast.Is):
|
||||
dunder = "is_"
|
||||
if isinstance(op, ast.IsNot):
|
||||
dunder = "is_"
|
||||
negate = True
|
||||
if not dunder and dunders:
|
||||
dunder = dunders[0]
|
||||
if dunder:
|
||||
a, b = (right, left) if dunder == "__contains__" else (left, right)
|
||||
if dunder == "is_" or dunders and policy.can_operate(dunders, a, b):
|
||||
result = getattr(operator, dunder)(a, b)
|
||||
if negate:
|
||||
result = not result
|
||||
if not result:
|
||||
all_true = False
|
||||
left = right
|
||||
else:
|
||||
raise GuardRejection(
|
||||
f"Comparison (`{dunder}`) for",
|
||||
type(left),
|
||||
f"not allowed in {context.evaluation} mode",
|
||||
)
|
||||
else:
|
||||
raise ValueError(
|
||||
f"Comparison `{dunder}` not supported"
|
||||
) # pragma: no cover
|
||||
return all_true
|
||||
if isinstance(node, ast.Constant):
|
||||
return node.value
|
||||
if isinstance(node, ast.Tuple):
|
||||
return tuple(eval_node(e, context) for e in node.elts)
|
||||
if isinstance(node, ast.List):
|
||||
return [eval_node(e, context) for e in node.elts]
|
||||
if isinstance(node, ast.Set):
|
||||
return {eval_node(e, context) for e in node.elts}
|
||||
if isinstance(node, ast.Dict):
|
||||
return dict(
|
||||
zip(
|
||||
[eval_node(k, context) for k in node.keys],
|
||||
[eval_node(v, context) for v in node.values],
|
||||
)
|
||||
)
|
||||
if isinstance(node, ast.Slice):
|
||||
return slice(
|
||||
eval_node(node.lower, context),
|
||||
eval_node(node.upper, context),
|
||||
eval_node(node.step, context),
|
||||
)
|
||||
if isinstance(node, ast.UnaryOp):
|
||||
value = eval_node(node.operand, context)
|
||||
dunders = _find_dunder(node.op, UNARY_OP_DUNDERS)
|
||||
if dunders:
|
||||
if policy.can_operate(dunders, value):
|
||||
return getattr(value, dunders[0])()
|
||||
else:
|
||||
raise GuardRejection(
|
||||
f"Operation (`{dunders}`) for",
|
||||
type(value),
|
||||
f"not allowed in {context.evaluation} mode",
|
||||
)
|
||||
if isinstance(node, ast.Subscript):
|
||||
value = eval_node(node.value, context)
|
||||
slice_ = eval_node(node.slice, context)
|
||||
if policy.can_get_item(value, slice_):
|
||||
return value[slice_]
|
||||
raise GuardRejection(
|
||||
"Subscript access (`__getitem__`) for",
|
||||
type(value), # not joined to avoid calling `repr`
|
||||
f" not allowed in {context.evaluation} mode",
|
||||
)
|
||||
if isinstance(node, ast.Name):
|
||||
return _eval_node_name(node.id, context)
|
||||
if isinstance(node, ast.Attribute):
|
||||
value = eval_node(node.value, context)
|
||||
if policy.can_get_attr(value, node.attr):
|
||||
return getattr(value, node.attr)
|
||||
raise GuardRejection(
|
||||
"Attribute access (`__getattr__`) for",
|
||||
type(value), # not joined to avoid calling `repr`
|
||||
f"not allowed in {context.evaluation} mode",
|
||||
)
|
||||
if isinstance(node, ast.IfExp):
|
||||
test = eval_node(node.test, context)
|
||||
if test:
|
||||
return eval_node(node.body, context)
|
||||
else:
|
||||
return eval_node(node.orelse, context)
|
||||
if isinstance(node, ast.Call):
|
||||
func = eval_node(node.func, context)
|
||||
if policy.can_call(func) and not node.keywords:
|
||||
args = [eval_node(arg, context) for arg in node.args]
|
||||
return func(*args)
|
||||
if isclass(func):
|
||||
# this code path gets entered when calling class e.g. `MyClass()`
|
||||
# or `my_instance.__class__()` - in both cases `func` is `MyClass`.
|
||||
# Should return `MyClass` if `__new__` is not overridden,
|
||||
# otherwise whatever `__new__` return type is.
|
||||
overridden_return_type = _eval_return_type(func.__new__, node, context)
|
||||
if overridden_return_type is not NOT_EVALUATED:
|
||||
return overridden_return_type
|
||||
return _create_duck_for_heap_type(func)
|
||||
else:
|
||||
return_type = _eval_return_type(func, node, context)
|
||||
if return_type is not NOT_EVALUATED:
|
||||
return return_type
|
||||
raise GuardRejection(
|
||||
"Call for",
|
||||
func, # not joined to avoid calling `repr`
|
||||
f"not allowed in {context.evaluation} mode",
|
||||
)
|
||||
raise ValueError("Unhandled node", ast.dump(node))
|
||||
|
||||
|
||||
def _eval_return_type(func: Callable, node: ast.Call, context: EvaluationContext):
|
||||
"""Evaluate return type of a given callable function.
|
||||
|
||||
Returns the built-in type, a duck or NOT_EVALUATED sentinel.
|
||||
"""
|
||||
try:
|
||||
sig = signature(func)
|
||||
except ValueError:
|
||||
sig = UNKNOWN_SIGNATURE
|
||||
# if annotation was not stringized, or it was stringized
|
||||
# but resolved by signature call we know the return type
|
||||
not_empty = sig.return_annotation is not Signature.empty
|
||||
if not_empty:
|
||||
return _resolve_annotation(sig.return_annotation, sig, func, node, context)
|
||||
return NOT_EVALUATED
|
||||
|
||||
|
||||
def _resolve_annotation(
|
||||
annotation,
|
||||
sig: Signature,
|
||||
func: Callable,
|
||||
node: ast.Call,
|
||||
context: EvaluationContext,
|
||||
):
|
||||
"""Resolve annotation created by user with `typing` module and custom objects."""
|
||||
annotation = (
|
||||
_eval_node_name(annotation, context)
|
||||
if isinstance(annotation, str)
|
||||
else annotation
|
||||
)
|
||||
origin = get_origin(annotation)
|
||||
if annotation is Self and hasattr(func, "__self__"):
|
||||
return func.__self__
|
||||
elif origin is Literal:
|
||||
type_args = get_args(annotation)
|
||||
if len(type_args) == 1:
|
||||
return type_args[0]
|
||||
elif annotation is LiteralString:
|
||||
return ""
|
||||
elif annotation is AnyStr:
|
||||
index = None
|
||||
for i, (key, value) in enumerate(sig.parameters.items()):
|
||||
if value.annotation is AnyStr:
|
||||
index = i
|
||||
break
|
||||
if index is not None and index < len(node.args):
|
||||
return eval_node(node.args[index], context)
|
||||
elif origin is TypeGuard:
|
||||
return bool()
|
||||
elif origin is Union:
|
||||
attributes = [
|
||||
attr
|
||||
for type_arg in get_args(annotation)
|
||||
for attr in dir(_resolve_annotation(type_arg, sig, func, node, context))
|
||||
]
|
||||
return _Duck(attributes=dict.fromkeys(attributes))
|
||||
elif is_typeddict(annotation):
|
||||
return _Duck(
|
||||
attributes=dict.fromkeys(dir(dict())),
|
||||
items={
|
||||
k: _resolve_annotation(v, sig, func, node, context)
|
||||
for k, v in annotation.__annotations__.items()
|
||||
},
|
||||
)
|
||||
elif hasattr(annotation, "_is_protocol"):
|
||||
return _Duck(attributes=dict.fromkeys(dir(annotation)))
|
||||
elif origin is Annotated:
|
||||
type_arg = get_args(annotation)[0]
|
||||
return _resolve_annotation(type_arg, sig, func, node, context)
|
||||
elif isinstance(annotation, NewType):
|
||||
return _eval_or_create_duck(annotation.__supertype__, node, context)
|
||||
elif isinstance(annotation, TypeAliasType):
|
||||
return _eval_or_create_duck(annotation.__value__, node, context)
|
||||
else:
|
||||
return _eval_or_create_duck(annotation, node, context)
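An illustrative sketch (not part of the file) of how return annotations are used to produce a stand-in value without calling the function; ``fetch`` is a hypothetical annotated callable:

import ast

def fetch() -> dict:
    raise RuntimeError("never called during completion")

ctx = EvaluationContext(locals={"fetch": fetch}, globals={}, evaluation="limited")
result = eval_node(ast.parse("fetch()", mode="eval"), ctx)
print(type(result))  # <class 'dict'>: an empty dict built from the annotation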
|
||||
|
||||
|
||||
def _eval_node_name(node_id: str, context: EvaluationContext):
|
||||
policy = EVALUATION_POLICIES[context.evaluation]
|
||||
if policy.allow_locals_access and node_id in context.locals:
|
||||
return context.locals[node_id]
|
||||
if policy.allow_globals_access and node_id in context.globals:
|
||||
return context.globals[node_id]
|
||||
if policy.allow_builtins_access and hasattr(builtins, node_id):
|
||||
# note: do not use __builtins__, it is an implementation detail of CPython
|
||||
return getattr(builtins, node_id)
|
||||
if not policy.allow_globals_access and not policy.allow_locals_access:
|
||||
raise GuardRejection(
|
||||
f"Namespace access not allowed in {context.evaluation} mode"
|
||||
)
|
||||
else:
|
||||
raise NameError(f"{node_id} not found in locals, globals, nor builtins")
|
||||
|
||||
|
||||
def _eval_or_create_duck(duck_type, node: ast.Call, context: EvaluationContext):
|
||||
policy = EVALUATION_POLICIES[context.evaluation]
|
||||
# if allow-listed builtin is on type annotation, instantiate it
|
||||
if policy.can_call(duck_type) and not node.keywords:
|
||||
args = [eval_node(arg, context) for arg in node.args]
|
||||
return duck_type(*args)
|
||||
# if custom class is in type annotation, mock it
|
||||
return _create_duck_for_heap_type(duck_type)
|
||||
|
||||
|
||||
def _create_duck_for_heap_type(duck_type):
|
||||
"""Create an imitation of an object of a given type (a duck).
|
||||
|
||||
Returns the duck or NOT_EVALUATED sentinel if duck could not be created.
|
||||
"""
|
||||
duck = ImpersonatingDuck()
|
||||
try:
|
||||
# this only works for heap types, not builtins
|
||||
duck.__class__ = duck_type
|
||||
return duck
|
||||
except TypeError:
|
||||
pass
|
||||
return NOT_EVALUATED
|
||||
|
||||
|
||||
SUPPORTED_EXTERNAL_GETITEM = {
|
||||
("pandas", "core", "indexing", "_iLocIndexer"),
|
||||
("pandas", "core", "indexing", "_LocIndexer"),
|
||||
("pandas", "DataFrame"),
|
||||
("pandas", "Series"),
|
||||
("numpy", "ndarray"),
|
||||
("numpy", "void"),
|
||||
}
|
||||
|
||||
|
||||
BUILTIN_GETITEM: Set[InstancesHaveGetItem] = {
|
||||
dict,
|
||||
str, # type: ignore[arg-type]
|
||||
bytes, # type: ignore[arg-type]
|
||||
list,
|
||||
tuple,
|
||||
collections.defaultdict,
|
||||
collections.deque,
|
||||
collections.OrderedDict,
|
||||
collections.ChainMap,
|
||||
collections.UserDict,
|
||||
collections.UserList,
|
||||
collections.UserString, # type: ignore[arg-type]
|
||||
_DummyNamedTuple,
|
||||
_IdentitySubscript,
|
||||
}
|
||||
|
||||
|
||||
def _list_methods(cls, source=None):
|
||||
"""For use on immutable objects or with methods returning a copy"""
|
||||
return [getattr(cls, k) for k in (source if source else dir(cls))]
|
||||
|
||||
|
||||
dict_non_mutating_methods = ("copy", "keys", "values", "items")
|
||||
list_non_mutating_methods = ("copy", "index", "count")
|
||||
set_non_mutating_methods = set(dir(set)) & set(dir(frozenset))
|
||||
|
||||
|
||||
dict_keys: Type[collections.abc.KeysView] = type({}.keys())
|
||||
|
||||
NUMERICS = {int, float, complex}
|
||||
|
||||
ALLOWED_CALLS = {
|
||||
bytes,
|
||||
*_list_methods(bytes),
|
||||
dict,
|
||||
*_list_methods(dict, dict_non_mutating_methods),
|
||||
dict_keys.isdisjoint,
|
||||
list,
|
||||
*_list_methods(list, list_non_mutating_methods),
|
||||
set,
|
||||
*_list_methods(set, set_non_mutating_methods),
|
||||
frozenset,
|
||||
*_list_methods(frozenset),
|
||||
range,
|
||||
str,
|
||||
*_list_methods(str),
|
||||
tuple,
|
||||
*_list_methods(tuple),
|
||||
*NUMERICS,
|
||||
*[method for numeric_cls in NUMERICS for method in _list_methods(numeric_cls)],
|
||||
collections.deque,
|
||||
*_list_methods(collections.deque, list_non_mutating_methods),
|
||||
collections.defaultdict,
|
||||
*_list_methods(collections.defaultdict, dict_non_mutating_methods),
|
||||
collections.OrderedDict,
|
||||
*_list_methods(collections.OrderedDict, dict_non_mutating_methods),
|
||||
collections.UserDict,
|
||||
*_list_methods(collections.UserDict, dict_non_mutating_methods),
|
||||
collections.UserList,
|
||||
*_list_methods(collections.UserList, list_non_mutating_methods),
|
||||
collections.UserString,
|
||||
*_list_methods(collections.UserString, dir(str)),
|
||||
collections.Counter,
|
||||
*_list_methods(collections.Counter, dict_non_mutating_methods),
|
||||
collections.Counter.elements,
|
||||
collections.Counter.most_common,
|
||||
}
|
||||
|
||||
BUILTIN_GETATTR: Set[MayHaveGetattr] = {
|
||||
*BUILTIN_GETITEM,
|
||||
set,
|
||||
frozenset,
|
||||
object,
|
||||
type, # `type` handles a lot of generic cases, e.g. numbers as in `int.real`.
|
||||
*NUMERICS,
|
||||
dict_keys,
|
||||
MethodDescriptorType,
|
||||
ModuleType,
|
||||
}
|
||||
|
||||
|
||||
BUILTIN_OPERATIONS = {*BUILTIN_GETATTR}
|
||||
|
||||
EVALUATION_POLICIES = {
|
||||
"minimal": EvaluationPolicy(
|
||||
allow_builtins_access=True,
|
||||
allow_locals_access=False,
|
||||
allow_globals_access=False,
|
||||
allow_item_access=False,
|
||||
allow_attr_access=False,
|
||||
allowed_calls=set(),
|
||||
allow_any_calls=False,
|
||||
allow_all_operations=False,
|
||||
),
|
||||
"limited": SelectivePolicy(
|
||||
allowed_getitem=BUILTIN_GETITEM,
|
||||
allowed_getitem_external=SUPPORTED_EXTERNAL_GETITEM,
|
||||
allowed_getattr=BUILTIN_GETATTR,
|
||||
allowed_getattr_external={
|
||||
# pandas Series/Frame implements custom `__getattr__`
|
||||
("pandas", "DataFrame"),
|
||||
("pandas", "Series"),
|
||||
},
|
||||
allowed_operations=BUILTIN_OPERATIONS,
|
||||
allow_builtins_access=True,
|
||||
allow_locals_access=True,
|
||||
allow_globals_access=True,
|
||||
allowed_calls=ALLOWED_CALLS,
|
||||
),
|
||||
"unsafe": EvaluationPolicy(
|
||||
allow_builtins_access=True,
|
||||
allow_locals_access=True,
|
||||
allow_globals_access=True,
|
||||
allow_attr_access=True,
|
||||
allow_item_access=True,
|
||||
allow_any_calls=True,
|
||||
allow_all_operations=True,
|
||||
),
|
||||
}
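For illustration only (not part of the file), the resulting policy objects can be queried directly; the method names follow the guard API used above:

policy = EVALUATION_POLICIES["limited"]
print(policy.can_get_item([1, 2, 3], 0))       # True: list.__getitem__ is allow-listed
print(policy.can_operate(("__add__",), 1, 2))  # True: int arithmetic is allowed
print(policy.can_call(list.append))            # False: mutating methods are not in ALLOWED_CALLS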
|
||||
|
||||
|
||||
__all__ = [
|
||||
"guarded_eval",
|
||||
"eval_node",
|
||||
"GuardRejection",
|
||||
"EvaluationContext",
|
||||
"_unbind_method",
|
||||
]
|
989
.venv/lib/python3.12/site-packages/IPython/core/history.py
Normal file
@ -0,0 +1,989 @@
|
||||
""" History related magics and functionality """
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
|
||||
import atexit
|
||||
import datetime
|
||||
import re
|
||||
import sqlite3
|
||||
import threading
|
||||
from pathlib import Path
|
||||
|
||||
from decorator import decorator
|
||||
from traitlets import (
|
||||
Any,
|
||||
Bool,
|
||||
Dict,
|
||||
Instance,
|
||||
Integer,
|
||||
List,
|
||||
TraitError,
|
||||
Unicode,
|
||||
Union,
|
||||
default,
|
||||
observe,
|
||||
)
|
||||
from traitlets.config.configurable import LoggingConfigurable
|
||||
|
||||
from IPython.paths import locate_profile
|
||||
from IPython.utils.decorators import undoc
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Classes and functions
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
@undoc
|
||||
class DummyDB(object):
|
||||
"""Dummy DB that will act as a black hole for history.
|
||||
|
||||
Only used in the absence of sqlite"""
|
||||
def execute(*args, **kwargs):
|
||||
return []
|
||||
|
||||
def commit(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def __enter__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def __exit__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
@decorator
|
||||
def only_when_enabled(f, self, *a, **kw):
|
||||
"""Decorator: return an empty list in the absence of sqlite."""
|
||||
if not self.enabled:
|
||||
return []
|
||||
else:
|
||||
return f(self, *a, **kw)
|
||||
|
||||
|
||||
# use 16kB as threshold for whether a corrupt history db should be saved
|
||||
# that should be at least 100 entries or so
|
||||
_SAVE_DB_SIZE = 16384
|
||||
|
||||
@decorator
|
||||
def catch_corrupt_db(f, self, *a, **kw):
|
||||
"""A decorator which wraps HistoryAccessor method calls to catch errors from
|
||||
a corrupt SQLite database, move the old database out of the way, and create
|
||||
a new one.
|
||||
|
||||
We avoid clobbering larger databases because this may be triggered due to filesystem issues,
|
||||
not just a corrupt file.
|
||||
"""
|
||||
try:
|
||||
return f(self, *a, **kw)
|
||||
except (sqlite3.DatabaseError, sqlite3.OperationalError) as e:
|
||||
self._corrupt_db_counter += 1
|
||||
self.log.error("Failed to open SQLite history %s (%s).", self.hist_file, e)
|
||||
if self.hist_file != ':memory:':
|
||||
if self._corrupt_db_counter > self._corrupt_db_limit:
|
||||
self.hist_file = ':memory:'
|
||||
self.log.error("Failed to load history too many times, history will not be saved.")
|
||||
elif self.hist_file.is_file():
|
||||
# move the file out of the way
|
||||
base = str(self.hist_file.parent / self.hist_file.stem)
|
||||
ext = self.hist_file.suffix
|
||||
size = self.hist_file.stat().st_size
|
||||
if size >= _SAVE_DB_SIZE:
|
||||
# if there's significant content, avoid clobbering
|
||||
now = datetime.datetime.now().isoformat().replace(':', '.')
|
||||
newpath = base + '-corrupt-' + now + ext
|
||||
# don't clobber previous corrupt backups
|
||||
for i in range(100):
|
||||
if not Path(newpath).exists():
|
||||
break
|
||||
else:
|
||||
newpath = base + '-corrupt-' + now + (u'-%i' % i) + ext
|
||||
else:
|
||||
# not much content, possibly empty; don't worry about clobbering
|
||||
# maybe we should just delete it?
|
||||
newpath = base + '-corrupt' + ext
|
||||
self.hist_file.rename(newpath)
|
||||
self.log.error("History file was moved to %s and a new file created.", newpath)
|
||||
self.init_db()
|
||||
return []
|
||||
else:
|
||||
# Failed with :memory:, something serious is wrong
|
||||
raise
|
||||
|
||||
|
||||
class HistoryAccessorBase(LoggingConfigurable):
|
||||
"""An abstract class for History Accessors """
|
||||
|
||||
def get_tail(self, n=10, raw=True, output=False, include_latest=False):
|
||||
raise NotImplementedError
|
||||
|
||||
def search(self, pattern="*", raw=True, search_raw=True,
|
||||
output=False, n=None, unique=False):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_range(self, session, start=1, stop=None, raw=True, output=False):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_range_by_str(self, rangestr, raw=True, output=False):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class HistoryAccessor(HistoryAccessorBase):
|
||||
"""Access the history database without adding to it.
|
||||
|
||||
This is intended for use by standalone history tools. IPython shells use
|
||||
HistoryManager, below, which is a subclass of this."""
|
||||
|
||||
# counter for init_db retries, so we don't keep trying over and over
|
||||
_corrupt_db_counter = 0
|
||||
# after two failures, fallback on :memory:
|
||||
_corrupt_db_limit = 2
|
||||
|
||||
# String holding the path to the history file
|
||||
hist_file = Union(
|
||||
[Instance(Path), Unicode()],
|
||||
help="""Path to file to use for SQLite history database.
|
||||
|
||||
By default, IPython will put the history database in the IPython
|
||||
profile directory. If you would rather share one history among
|
||||
profiles, you can set this value in each, so that they are consistent.
|
||||
|
||||
Due to an issue with fcntl, SQLite is known to misbehave on some NFS
|
||||
mounts. If you see IPython hanging, try setting this to something on a
|
||||
local disk, e.g::
|
||||
|
||||
ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
|
||||
|
||||
you can also use the specific value `:memory:` (including the colons
at both ends but not the backticks), to avoid creating a history file.
|
||||
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
enabled = Bool(True,
|
||||
help="""enable the SQLite history
|
||||
|
||||
set enabled=False to disable the SQLite history,
|
||||
in which case there will be no stored history, no SQLite connection,
|
||||
and no background saving thread. This may be necessary in some
|
||||
threaded environments where IPython is embedded.
|
||||
""",
|
||||
).tag(config=True)
|
||||
|
||||
connection_options = Dict(
|
||||
help="""Options for configuring the SQLite connection
|
||||
|
||||
These options are passed as keyword args to sqlite3.connect
|
||||
when establishing database connections.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@default("connection_options")
|
||||
def _default_connection_options(self):
|
||||
return dict(check_same_thread=False)
|
||||
|
||||
# The SQLite database
|
||||
db = Any()
|
||||
@observe('db')
|
||||
def _db_changed(self, change):
|
||||
"""validate the db, since it can be an Instance of two different types"""
|
||||
new = change['new']
|
||||
connection_types = (DummyDB, sqlite3.Connection)
|
||||
if not isinstance(new, connection_types):
|
||||
msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \
|
||||
(self.__class__.__name__, new)
|
||||
raise TraitError(msg)
|
||||
|
||||
def __init__(self, profile="default", hist_file="", **traits):
|
||||
"""Create a new history accessor.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
profile : str
|
||||
The name of the profile from which to open history.
|
||||
hist_file : str
|
||||
Path to an SQLite history database stored by IPython. If specified,
|
||||
hist_file overrides profile.
|
||||
config : :class:`~traitlets.config.loader.Config`
|
||||
Config object. hist_file can also be set through this.
|
||||
"""
|
||||
super(HistoryAccessor, self).__init__(**traits)
|
||||
# defer setting hist_file from kwarg until after init,
|
||||
# otherwise the default kwarg value would clobber any value
|
||||
# set by config
|
||||
if hist_file:
|
||||
self.hist_file = hist_file
|
||||
|
||||
try:
|
||||
self.hist_file
|
||||
except TraitError:
|
||||
# No one has set the hist_file, yet.
|
||||
self.hist_file = self._get_hist_file_name(profile)
|
||||
|
||||
self.init_db()
|
||||
|
||||
def _get_hist_file_name(self, profile='default'):
|
||||
"""Find the history file for the given profile name.
|
||||
|
||||
This is overridden by the HistoryManager subclass, to use the shell's
|
||||
active profile.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
profile : str
|
||||
The name of a profile which has a history file.
|
||||
"""
|
||||
return Path(locate_profile(profile)) / "history.sqlite"
|
||||
|
||||
@catch_corrupt_db
|
||||
def init_db(self):
|
||||
"""Connect to the database, and create tables if necessary."""
|
||||
if not self.enabled:
|
||||
self.db = DummyDB()
|
||||
return
|
||||
|
||||
# use detect_types so that timestamps return datetime objects
|
||||
kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
|
||||
kwargs.update(self.connection_options)
|
||||
self.db = sqlite3.connect(str(self.hist_file), **kwargs)
|
||||
with self.db:
|
||||
self.db.execute(
|
||||
"""CREATE TABLE IF NOT EXISTS sessions (session integer
|
||||
primary key autoincrement, start timestamp,
|
||||
end timestamp, num_cmds integer, remark text)"""
|
||||
)
|
||||
self.db.execute(
|
||||
"""CREATE TABLE IF NOT EXISTS history
|
||||
(session integer, line integer, source text, source_raw text,
|
||||
PRIMARY KEY (session, line))"""
|
||||
)
|
||||
# Output history is optional, but ensure the table's there so it can be
|
||||
# enabled later.
|
||||
self.db.execute(
|
||||
"""CREATE TABLE IF NOT EXISTS output_history
|
||||
(session integer, line integer, output text,
|
||||
PRIMARY KEY (session, line))"""
|
||||
)
|
||||
# success! reset corrupt db count
|
||||
self._corrupt_db_counter = 0
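The tables created above can also be inspected with plain sqlite3; a small sketch (the database path is hypothetical):

import sqlite3

con = sqlite3.connect("/path/to/profile_default/history.sqlite")
for row in con.execute(
    "SELECT session, start, num_cmds FROM sessions ORDER BY session DESC LIMIT 3"
):
    print(row)
con.close()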
|
||||
|
||||
def writeout_cache(self):
|
||||
"""Overridden by HistoryManager to dump the cache before certain
|
||||
database lookups."""
|
||||
pass
|
||||
|
||||
## -------------------------------
|
||||
## Methods for retrieving history:
|
||||
## -------------------------------
|
||||
def _run_sql(self, sql, params, raw=True, output=False, latest=False):
|
||||
"""Prepares and runs an SQL query for the history database.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
sql : str
|
||||
Any filtering expressions to go after SELECT ... FROM ...
|
||||
params : tuple
|
||||
Parameters passed to the SQL query (to replace "?")
|
||||
raw, output : bool
|
||||
See :meth:`get_range`
|
||||
latest : bool
|
||||
Select rows with max (session, line)
|
||||
|
||||
Returns
|
||||
-------
|
||||
Tuples as :meth:`get_range`
|
||||
"""
|
||||
toget = 'source_raw' if raw else 'source'
|
||||
sqlfrom = "history"
|
||||
if output:
|
||||
sqlfrom = "history LEFT JOIN output_history USING (session, line)"
|
||||
toget = "history.%s, output_history.output" % toget
|
||||
if latest:
|
||||
toget += ", MAX(session * 128 * 1024 + line)"
|
||||
this_query = "SELECT session, line, %s FROM %s " % (toget, sqlfrom) + sql
cur = self.db.execute(this_query, params)
|
||||
if latest:
|
||||
cur = (row[:-1] for row in cur)
|
||||
if output: # Regroup into 3-tuples, and parse JSON
|
||||
return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur)
|
||||
return cur
|
||||
|
||||
@only_when_enabled
|
||||
@catch_corrupt_db
|
||||
def get_session_info(self, session):
|
||||
"""Get info about a session.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
session : int
|
||||
Session number to retrieve.
|
||||
|
||||
Returns
|
||||
-------
|
||||
session_id : int
|
||||
Session ID number
|
||||
start : datetime
|
||||
Timestamp for the start of the session.
|
||||
end : datetime
|
||||
Timestamp for the end of the session, or None if IPython crashed.
|
||||
num_cmds : int
|
||||
Number of commands run, or None if IPython crashed.
|
||||
remark : unicode
|
||||
A manually set description.
|
||||
"""
|
||||
query = "SELECT * from sessions where session == ?"
|
||||
return self.db.execute(query, (session,)).fetchone()
|
||||
|
||||
@catch_corrupt_db
|
||||
def get_last_session_id(self):
|
||||
"""Get the last session ID currently in the database.
|
||||
|
||||
Within IPython, this should be the same as the value stored in
|
||||
:attr:`HistoryManager.session_number`.
|
||||
"""
|
||||
for record in self.get_tail(n=1, include_latest=True):
|
||||
return record[0]
|
||||
|
||||
@catch_corrupt_db
|
||||
def get_tail(self, n=10, raw=True, output=False, include_latest=False):
|
||||
"""Get the last n lines from the history database.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n : int
|
||||
The number of lines to get
|
||||
raw, output : bool
|
||||
See :meth:`get_range`
|
||||
include_latest : bool
|
||||
If False (default), n+1 lines are fetched, and the latest one
is discarded. This is intended for use where the function is
called by a user command, so that the command itself is not
included in what it returns.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Tuples as :meth:`get_range`
|
||||
"""
|
||||
self.writeout_cache()
|
||||
if not include_latest:
|
||||
n += 1
|
||||
cur = self._run_sql(
|
||||
"ORDER BY session DESC, line DESC LIMIT ?", (n,), raw=raw, output=output
|
||||
)
|
||||
if not include_latest:
|
||||
return reversed(list(cur)[1:])
|
||||
return reversed(list(cur))
|
||||
|
||||
@catch_corrupt_db
|
||||
def search(self, pattern="*", raw=True, search_raw=True,
|
||||
output=False, n=None, unique=False):
|
||||
"""Search the database using unix glob-style matching (wildcards
|
||||
* and ?).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
pattern : str
|
||||
The wildcarded pattern to match when searching
|
||||
search_raw : bool
|
||||
If True, search the raw input, otherwise, the parsed input
|
||||
raw, output : bool
|
||||
See :meth:`get_range`
|
||||
n : None or int
|
||||
If an integer is given, it defines the limit of
|
||||
returned entries.
|
||||
unique : bool
|
||||
When it is true, return only unique entries.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Tuples as :meth:`get_range`
|
||||
"""
|
||||
tosearch = "source_raw" if search_raw else "source"
|
||||
if output:
|
||||
tosearch = "history." + tosearch
|
||||
self.writeout_cache()
|
||||
sqlform = "WHERE %s GLOB ?" % tosearch
|
||||
params = (pattern,)
|
||||
if unique:
|
||||
sqlform += ' GROUP BY {0}'.format(tosearch)
|
||||
if n is not None:
|
||||
sqlform += " ORDER BY session DESC, line DESC LIMIT ?"
|
||||
params += (n,)
|
||||
elif unique:
|
||||
sqlform += " ORDER BY session, line"
|
||||
cur = self._run_sql(sqlform, params, raw=raw, output=output, latest=unique)
|
||||
if n is not None:
|
||||
return reversed(list(cur))
|
||||
return cur
|
||||
|
||||
@catch_corrupt_db
|
||||
def get_range(self, session, start=1, stop=None, raw=True, output=False):
|
||||
"""Retrieve input by session.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
session : int
|
||||
Session number to retrieve.
|
||||
start : int
|
||||
First line to retrieve.
|
||||
stop : int
|
||||
End of line range (excluded from output itself). If None, retrieve
|
||||
to the end of the session.
|
||||
raw : bool
|
||||
If True, return untranslated input
|
||||
output : bool
|
||||
If True, attempt to include output. This will be 'real' Python
|
||||
objects for the current session, or text reprs from previous
|
||||
sessions if db_log_output was enabled at the time. Where no output
|
||||
is found, None is used.
|
||||
|
||||
Returns
|
||||
-------
|
||||
entries
|
||||
An iterator over the desired lines. Each line is a 3-tuple, either
|
||||
(session, line, input) if output is False, or
|
||||
(session, line, (input, output)) if output is True.
|
||||
"""
|
||||
if stop:
|
||||
lineclause = "line >= ? AND line < ?"
|
||||
params = (session, start, stop)
|
||||
else:
|
||||
lineclause = "line>=?"
|
||||
params = (session, start)
|
||||
|
||||
return self._run_sql("WHERE session==? AND %s" % lineclause,
|
||||
params, raw=raw, output=output)
|
||||
|
||||
def get_range_by_str(self, rangestr, raw=True, output=False):
|
||||
"""Get lines of history from a string of ranges, as used by magic
|
||||
commands %hist, %save, %macro, etc.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
rangestr : str
|
||||
A string specifying ranges, e.g. "5 ~2/1-4". If an empty string is
given, this will return everything from the current session's history.
|
||||
|
||||
See the documentation of :func:`%history` for the full details.
|
||||
|
||||
raw, output : bool
|
||||
As :meth:`get_range`
|
||||
|
||||
Returns
|
||||
-------
|
||||
Tuples as :meth:`get_range`
|
||||
"""
|
||||
for sess, s, e in extract_hist_ranges(rangestr):
|
||||
for line in self.get_range(sess, s, e, raw=raw, output=output):
|
||||
yield line
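A usage sketch (not part of the file) of HistoryAccessor as a standalone reader; the database path is hypothetical:

from IPython.core.history import HistoryAccessor

hist = HistoryAccessor(hist_file="/path/to/profile_default/history.sqlite")
for session, line, source in hist.get_tail(5, include_latest=True):
    print(f"{session}/{line}: {source}")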
|
||||
|
||||
|
||||
class HistoryManager(HistoryAccessor):
|
||||
"""A class to organize all history-related functionality in one place.
|
||||
"""
|
||||
# Public interface
|
||||
|
||||
# An instance of the IPython shell we are attached to
|
||||
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
|
||||
allow_none=True)
|
||||
# Lists to hold processed and raw history. These start with a blank entry
|
||||
# so that we can index them starting from 1
|
||||
input_hist_parsed = List([""])
|
||||
input_hist_raw = List([""])
|
||||
# A list of directories visited during session
|
||||
dir_hist: List = List()
|
||||
|
||||
@default("dir_hist")
|
||||
def _dir_hist_default(self):
|
||||
try:
|
||||
return [Path.cwd()]
|
||||
except OSError:
|
||||
return []
|
||||
|
||||
# A dict of output history, keyed with ints from the shell's
|
||||
# execution count.
|
||||
output_hist = Dict()
|
||||
# The text/plain repr of outputs.
|
||||
output_hist_reprs = Dict()
|
||||
|
||||
# The number of the current session in the history database
|
||||
session_number = Integer()
|
||||
|
||||
db_log_output = Bool(False,
|
||||
help="Should the history database include output? (default: no)"
|
||||
).tag(config=True)
|
||||
db_cache_size = Integer(0,
|
||||
help="Write to database every x commands (higher values save disk access & power).\n"
|
||||
"Values of 1 or less effectively disable caching."
|
||||
).tag(config=True)
|
||||
# The input and output caches
|
||||
db_input_cache: List = List()
|
||||
db_output_cache: List = List()
|
||||
|
||||
# History saving in separate thread
|
||||
save_thread = Instance('IPython.core.history.HistorySavingThread',
|
||||
allow_none=True)
|
||||
save_flag = Instance(threading.Event, allow_none=True)
|
||||
|
||||
# Private interface
|
||||
# Variables used to store the three last inputs from the user. On each new
|
||||
# history update, we populate the user's namespace with these, shifted as
|
||||
# necessary.
|
||||
_i00 = Unicode("")
|
||||
_i = Unicode("")
|
||||
_ii = Unicode("")
|
||||
_iii = Unicode("")
|
||||
|
||||
# A regex matching all forms of the exit command, so that we don't store
|
||||
# them in the history (it's annoying to rewind the first entry and land on
|
||||
# an exit call).
|
||||
_exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$")
|
||||
|
||||
def __init__(self, shell=None, config=None, **traits):
|
||||
"""Create a new history manager associated with a shell instance.
|
||||
"""
|
||||
super(HistoryManager, self).__init__(shell=shell, config=config,
|
||||
**traits)
|
||||
self.save_flag = threading.Event()
|
||||
self.db_input_cache_lock = threading.Lock()
|
||||
self.db_output_cache_lock = threading.Lock()
|
||||
|
||||
try:
|
||||
self.new_session()
|
||||
except sqlite3.OperationalError:
|
||||
self.log.error("Failed to create history session in %s. History will not be saved.",
|
||||
self.hist_file, exc_info=True)
|
||||
self.hist_file = ':memory:'
|
||||
|
||||
if self.enabled and self.hist_file != ':memory:':
|
||||
self.save_thread = HistorySavingThread(self)
|
||||
try:
|
||||
self.save_thread.start()
|
||||
except RuntimeError:
|
||||
self.log.error(
|
||||
"Failed to start history saving thread. History will not be saved.",
|
||||
exc_info=True,
|
||||
)
|
||||
self.hist_file = ":memory:"
|
||||
|
||||
def _get_hist_file_name(self, profile=None):
|
||||
"""Get default history file name based on the Shell's profile.
|
||||
|
||||
The profile parameter is ignored, but must exist for compatibility with
|
||||
the parent class."""
|
||||
profile_dir = self.shell.profile_dir.location
|
||||
return Path(profile_dir) / "history.sqlite"
|
||||
|
||||
@only_when_enabled
|
||||
def new_session(self, conn=None):
|
||||
"""Get a new session number."""
|
||||
if conn is None:
|
||||
conn = self.db
|
||||
|
||||
with conn:
|
||||
cur = conn.execute(
|
||||
"""INSERT INTO sessions VALUES (NULL, ?, NULL,
|
||||
NULL, '') """,
|
||||
(datetime.datetime.now().isoformat(" "),),
|
||||
)
|
||||
self.session_number = cur.lastrowid
|
||||
|
||||
def end_session(self):
|
||||
"""Close the database session, filling in the end time and line count."""
|
||||
self.writeout_cache()
|
||||
with self.db:
|
||||
self.db.execute(
|
||||
"""UPDATE sessions SET end=?, num_cmds=? WHERE
|
||||
session==?""",
|
||||
(
|
||||
datetime.datetime.now().isoformat(" "),
|
||||
len(self.input_hist_parsed) - 1,
|
||||
self.session_number,
|
||||
),
|
||||
)
|
||||
self.session_number = 0
|
||||
|
||||
def name_session(self, name):
|
||||
"""Give the current session a name in the history database."""
|
||||
with self.db:
|
||||
self.db.execute("UPDATE sessions SET remark=? WHERE session==?",
|
||||
(name, self.session_number))
|
||||
|
||||
def reset(self, new_session=True):
|
||||
"""Clear the session history, releasing all object references, and
|
||||
optionally open a new session."""
|
||||
self.output_hist.clear()
|
||||
# The directory history can't be completely empty
|
||||
self.dir_hist[:] = [Path.cwd()]
|
||||
|
||||
if new_session:
|
||||
if self.session_number:
|
||||
self.end_session()
|
||||
self.input_hist_parsed[:] = [""]
|
||||
self.input_hist_raw[:] = [""]
|
||||
self.new_session()
|
||||
|
||||
# ------------------------------
|
||||
# Methods for retrieving history
|
||||
# ------------------------------
|
||||
def get_session_info(self, session=0):
|
||||
"""Get info about a session.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
session : int
|
||||
Session number to retrieve. The current session is 0, and negative
|
||||
numbers count back from current session, so -1 is the previous session.
|
||||
|
||||
Returns
|
||||
-------
|
||||
session_id : int
|
||||
Session ID number
|
||||
start : datetime
|
||||
Timestamp for the start of the session.
|
||||
end : datetime
|
||||
Timestamp for the end of the session, or None if IPython crashed.
|
||||
num_cmds : int
|
||||
Number of commands run, or None if IPython crashed.
|
||||
remark : unicode
|
||||
A manually set description.
|
||||
"""
|
||||
if session <= 0:
|
||||
session += self.session_number
|
||||
|
||||
return super(HistoryManager, self).get_session_info(session=session)
|
||||
|
||||
@catch_corrupt_db
|
||||
def get_tail(self, n=10, raw=True, output=False, include_latest=False):
|
||||
"""Get the last n lines from the history database.
|
||||
|
||||
Most recent entry last.
|
||||
|
||||
Entries are reordered so that, when possible, the most recent ones
come from the current session.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n : int
|
||||
The number of lines to get
|
||||
raw, output : bool
|
||||
See :meth:`get_range`
|
||||
include_latest : bool
|
||||
If False (default), n+1 lines are fetched, and the latest one
is discarded. This is intended for use where the function is
called by a user command, so that the command itself is not
included in what it returns.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Tuples as :meth:`get_range`
|
||||
"""
|
||||
self.writeout_cache()
|
||||
if not include_latest:
|
||||
n += 1
|
||||
# cursor/line/entry
|
||||
this_cur = list(
|
||||
self._run_sql(
|
||||
"WHERE session == ? ORDER BY line DESC LIMIT ? ",
|
||||
(self.session_number, n),
|
||||
raw=raw,
|
||||
output=output,
|
||||
)
|
||||
)
|
||||
other_cur = list(
|
||||
self._run_sql(
|
||||
"WHERE session != ? ORDER BY session DESC, line DESC LIMIT ?",
|
||||
(self.session_number, n),
|
||||
raw=raw,
|
||||
output=output,
|
||||
)
|
||||
)
|
||||
|
||||
everything = this_cur + other_cur
|
||||
|
||||
everything = everything[:n]
|
||||
|
||||
if not include_latest:
|
||||
return list(everything)[:0:-1]
|
||||
return list(everything)[::-1]
|
||||
|
||||
def _get_range_session(self, start=1, stop=None, raw=True, output=False):
|
||||
"""Get input and output history from the current session. Called by
|
||||
get_range, and takes similar parameters."""
|
||||
input_hist = self.input_hist_raw if raw else self.input_hist_parsed
|
||||
|
||||
n = len(input_hist)
|
||||
if start < 0:
|
||||
start += n
|
||||
if not stop or (stop > n):
|
||||
stop = n
|
||||
elif stop < 0:
|
||||
stop += n
|
||||
|
||||
for i in range(start, stop):
|
||||
if output:
|
||||
line = (input_hist[i], self.output_hist_reprs.get(i))
|
||||
else:
|
||||
line = input_hist[i]
|
||||
yield (0, i, line)
|
||||
|
||||
def get_range(self, session=0, start=1, stop=None, raw=True, output=False):
|
||||
"""Retrieve input by session.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
session : int
|
||||
Session number to retrieve. The current session is 0, and negative
|
||||
numbers count back from current session, so -1 is previous session.
|
||||
start : int
|
||||
First line to retrieve.
|
||||
stop : int
|
||||
End of line range (excluded from output itself). If None, retrieve
|
||||
to the end of the session.
|
||||
raw : bool
|
||||
If True, return untranslated input
|
||||
output : bool
|
||||
If True, attempt to include output. This will be 'real' Python
|
||||
objects for the current session, or text reprs from previous
|
||||
sessions if db_log_output was enabled at the time. Where no output
|
||||
is found, None is used.
|
||||
|
||||
Returns
|
||||
-------
|
||||
entries
|
||||
An iterator over the desired lines. Each line is a 3-tuple, either
|
||||
(session, line, input) if output is False, or
|
||||
(session, line, (input, output)) if output is True.
|
||||
"""
|
||||
if session <= 0:
|
||||
session += self.session_number
|
||||
if session==self.session_number: # Current session
|
||||
return self._get_range_session(start, stop, raw, output)
|
||||
return super(HistoryManager, self).get_range(session, start, stop, raw,
|
||||
output)
|
||||
|
||||
## ----------------------------
|
||||
## Methods for storing history:
|
||||
## ----------------------------
|
||||
def store_inputs(self, line_num, source, source_raw=None):
|
||||
"""Store source and raw input in history and create input cache
|
||||
variables ``_i*``.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
line_num : int
|
||||
The prompt number of this input.
|
||||
source : str
|
||||
Python input.
|
||||
source_raw : str, optional
|
||||
If given, this is the raw input without any IPython transformations
|
||||
applied to it. If not given, ``source`` is used.
|
||||
"""
|
||||
if source_raw is None:
|
||||
source_raw = source
|
||||
source = source.rstrip('\n')
|
||||
source_raw = source_raw.rstrip('\n')
|
||||
|
||||
# do not store exit/quit commands
|
||||
if self._exit_re.match(source_raw.strip()):
|
||||
return
|
||||
|
||||
self.input_hist_parsed.append(source)
|
||||
self.input_hist_raw.append(source_raw)
|
||||
|
||||
with self.db_input_cache_lock:
|
||||
self.db_input_cache.append((line_num, source, source_raw))
|
||||
# Trigger to flush cache and write to DB.
|
||||
if len(self.db_input_cache) >= self.db_cache_size:
|
||||
self.save_flag.set()
|
||||
|
||||
# update the auto _i variables
|
||||
self._iii = self._ii
|
||||
self._ii = self._i
|
||||
self._i = self._i00
|
||||
self._i00 = source_raw
|
||||
|
||||
# hackish access to user namespace to create _i1,_i2... dynamically
|
||||
new_i = '_i%s' % line_num
|
||||
to_main = {'_i': self._i,
|
||||
'_ii': self._ii,
|
||||
'_iii': self._iii,
|
||||
new_i : self._i00 }
|
||||
|
||||
if self.shell is not None:
|
||||
self.shell.push(to_main, interactive=False)
|
||||
|
||||
def store_output(self, line_num):
|
||||
"""If database output logging is enabled, this saves all the
|
||||
outputs from the indicated prompt number to the database. It's
|
||||
called by run_cell after code has been executed.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
line_num : int
|
||||
The line number from which to save outputs
|
||||
"""
|
||||
if (not self.db_log_output) or (line_num not in self.output_hist_reprs):
|
||||
return
|
||||
output = self.output_hist_reprs[line_num]
|
||||
|
||||
with self.db_output_cache_lock:
|
||||
self.db_output_cache.append((line_num, output))
|
||||
if self.db_cache_size <= 1:
|
||||
self.save_flag.set()
|
||||
|
||||
def _writeout_input_cache(self, conn):
|
||||
with conn:
|
||||
for line in self.db_input_cache:
|
||||
conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)",
|
||||
(self.session_number,)+line)
|
||||
|
||||
def _writeout_output_cache(self, conn):
|
||||
with conn:
|
||||
for line in self.db_output_cache:
|
||||
conn.execute("INSERT INTO output_history VALUES (?, ?, ?)",
|
||||
(self.session_number,)+line)
|
||||
|
||||
@only_when_enabled
|
||||
def writeout_cache(self, conn=None):
|
||||
"""Write any entries in the cache to the database."""
|
||||
if conn is None:
|
||||
conn = self.db
|
||||
|
||||
with self.db_input_cache_lock:
|
||||
try:
|
||||
self._writeout_input_cache(conn)
|
||||
except sqlite3.IntegrityError:
|
||||
self.new_session(conn)
|
||||
print("ERROR! Session/line number was not unique in",
|
||||
"database. History logging moved to new session",
|
||||
self.session_number)
|
||||
try:
|
||||
# Try writing to the new session. If this fails, don't
|
||||
# recurse
|
||||
self._writeout_input_cache(conn)
|
||||
except sqlite3.IntegrityError:
|
||||
pass
|
||||
finally:
|
||||
self.db_input_cache = []
|
||||
|
||||
with self.db_output_cache_lock:
|
||||
try:
|
||||
self._writeout_output_cache(conn)
|
||||
except sqlite3.IntegrityError:
|
||||
print("!! Session/line number for output was not unique",
|
||||
"in database. Output will not be stored.")
|
||||
finally:
|
||||
self.db_output_cache = []
|
||||
|
||||
|
||||
class HistorySavingThread(threading.Thread):
|
||||
"""This thread takes care of writing history to the database, so that
|
||||
the UI isn't held up while that happens.
|
||||
|
||||
It waits for the HistoryManager's save_flag to be set, then writes out
|
||||
the history cache. The main thread is responsible for setting the flag when
|
||||
the cache size reaches a defined threshold."""
|
||||
daemon = True
|
||||
stop_now = False
|
||||
enabled = True
|
||||
def __init__(self, history_manager):
|
||||
super(HistorySavingThread, self).__init__(name="IPythonHistorySavingThread")
|
||||
self.history_manager = history_manager
|
||||
self.enabled = history_manager.enabled
|
||||
|
||||
@only_when_enabled
|
||||
def run(self):
|
||||
atexit.register(self.stop)
|
||||
# We need a separate db connection per thread:
|
||||
try:
|
||||
self.db = sqlite3.connect(
|
||||
str(self.history_manager.hist_file),
|
||||
**self.history_manager.connection_options,
|
||||
)
|
||||
while True:
|
||||
self.history_manager.save_flag.wait()
|
||||
if self.stop_now:
|
||||
self.db.close()
|
||||
return
|
||||
self.history_manager.save_flag.clear()
|
||||
self.history_manager.writeout_cache(self.db)
|
||||
except Exception as e:
|
||||
print(("The history saving thread hit an unexpected error (%s)."
|
||||
"History will not be written to the database.") % repr(e))
|
||||
finally:
|
||||
atexit.unregister(self.stop)
|
||||
|
||||
def stop(self):
|
||||
"""This can be called from the main thread to safely stop this thread.
|
||||
|
||||
Note that it does not attempt to write out remaining history before
|
||||
exiting. That should be done by calling the HistoryManager's
|
||||
end_session method."""
|
||||
self.stop_now = True
|
||||
self.history_manager.save_flag.set()
|
||||
self.join()
|
||||
|
||||
|
||||
# To match, e.g. ~5/8-~2/3
|
||||
range_re = re.compile(r"""
|
||||
((?P<startsess>~?\d+)/)?
|
||||
(?P<start>\d+)?
|
||||
((?P<sep>[\-:])
|
||||
((?P<endsess>~?\d+)/)?
|
||||
(?P<end>\d+))?
|
||||
$""", re.VERBOSE)
|
||||
|
||||
|
||||
def extract_hist_ranges(ranges_str):
|
||||
"""Turn a string of history ranges into 3-tuples of (session, start, stop).
|
||||
|
||||
Empty string results in a `[(0, 1, None)]`, i.e. "everything from current
|
||||
session".
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> list(extract_hist_ranges("~8/5-~7/4 2"))
|
||||
[(-8, 5, None), (-7, 1, 5), (0, 2, 3)]
|
||||
"""
|
||||
if ranges_str == "":
|
||||
yield (0, 1, None) # Everything from current session
|
||||
return
|
||||
|
||||
for range_str in ranges_str.split():
|
||||
rmatch = range_re.match(range_str)
|
||||
if not rmatch:
|
||||
continue
|
||||
start = rmatch.group("start")
|
||||
if start:
|
||||
start = int(start)
|
||||
end = rmatch.group("end")
|
||||
# If no end specified, get (a, a + 1)
|
||||
end = int(end) if end else start + 1
|
||||
else: # start not specified
|
||||
if not rmatch.group('startsess'): # no startsess
|
||||
continue
|
||||
start = 1
|
||||
end = None # provide the entire session hist
|
||||
|
||||
if rmatch.group("sep") == "-": # 1-3 == 1:4 --> [1, 2, 3]
|
||||
end += 1
|
||||
startsess = rmatch.group("startsess") or "0"
|
||||
endsess = rmatch.group("endsess") or startsess
|
||||
startsess = int(startsess.replace("~","-"))
|
||||
endsess = int(endsess.replace("~","-"))
|
||||
assert endsess >= startsess, "start session must be earlier than end session"
|
||||
|
||||
if endsess == startsess:
|
||||
yield (startsess, start, end)
|
||||
continue
|
||||
# Multiple sessions in one range:
|
||||
yield (startsess, start, None)
|
||||
for sess in range(startsess+1, endsess):
|
||||
yield (sess, 1, None)
|
||||
yield (endsess, 1, end)
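One more illustrative call (not part of the file) showing how a range spanning two sessions expands:

print(list(extract_hist_ranges("~2/3-~1/5")))
# [(-2, 3, None), (-1, 1, 6)]  -- the "-" separator makes the end line inclusive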
|
||||
|
||||
|
||||
def _format_lineno(session, line):
|
||||
"""Helper function to format line numbers properly."""
|
||||
if session == 0:
|
||||
return str(line)
|
||||
return "%s#%s" % (session, line)
|
158
.venv/lib/python3.12/site-packages/IPython/core/historyapp.py
Normal file
@ -0,0 +1,158 @@
|
||||
# encoding: utf-8
|
||||
"""
|
||||
An application for managing IPython history.
|
||||
|
||||
To be invoked as the `ipython history` subcommand.
|
||||
"""
|
||||
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
|
||||
from traitlets.config.application import Application
|
||||
from .application import BaseIPythonApplication
|
||||
from traitlets import Bool, Int, Dict
|
||||
from ..utils.io import ask_yes_no
|
||||
|
||||
trim_hist_help = """Trim the IPython history database to the last 1000 entries.
|
||||
|
||||
This actually copies the last 1000 entries to a new database, and then replaces
|
||||
the old file with the new. Use the `--keep=` argument to specify a number
|
||||
other than 1000.
|
||||
"""
|
||||
|
||||
clear_hist_help = """Clear the IPython history database, deleting all entries.
|
||||
|
||||
Because this is a destructive operation, IPython will prompt the user if they
|
||||
really want to do this. Passing a `-f` flag will force clearing without a
|
||||
prompt.
|
||||
|
||||
This is a handy alias for `ipython history trim --keep=0`.
|
||||
"""
|
||||
|
||||
|
||||
class HistoryTrim(BaseIPythonApplication):
|
||||
description = trim_hist_help
|
||||
|
||||
backup = Bool(False, help="Keep the old history file as history.sqlite.<N>").tag(
|
||||
config=True
|
||||
)
|
||||
|
||||
keep = Int(1000, help="Number of recent lines to keep in the database.").tag(
|
||||
config=True
|
||||
)
|
||||
|
||||
flags = Dict( # type: ignore
|
||||
dict(backup=({"HistoryTrim": {"backup": True}}, backup.help))
|
||||
)
|
||||
|
||||
aliases = Dict(dict(keep="HistoryTrim.keep")) # type: ignore
|
||||
|
||||
def start(self):
|
||||
profile_dir = Path(self.profile_dir.location)
|
||||
hist_file = profile_dir / "history.sqlite"
|
||||
con = sqlite3.connect(hist_file)
|
||||
|
||||
# Grab the recent history from the current database.
|
||||
inputs = list(con.execute('SELECT session, line, source, source_raw FROM '
|
||||
'history ORDER BY session DESC, line DESC LIMIT ?', (self.keep+1,)))
|
||||
if len(inputs) <= self.keep:
|
||||
print("There are already at most %d entries in the history database." % self.keep)
|
||||
print("Not doing anything. Use --keep= argument to keep fewer entries")
|
||||
return
|
||||
|
||||
print("Trimming history to the most recent %d entries." % self.keep)
|
||||
|
||||
inputs.pop() # Remove the extra element we got to check the length.
|
||||
inputs.reverse()
|
||||
if inputs:
|
||||
first_session = inputs[0][0]
|
||||
outputs = list(con.execute('SELECT session, line, output FROM '
|
||||
'output_history WHERE session >= ?', (first_session,)))
|
||||
sessions = list(con.execute('SELECT session, start, end, num_cmds, remark FROM '
|
||||
'sessions WHERE session >= ?', (first_session,)))
|
||||
con.close()
|
||||
|
||||
# Create the new history database.
|
||||
new_hist_file = profile_dir / "history.sqlite.new"
|
||||
i = 0
|
||||
while new_hist_file.exists():
|
||||
# Make sure we don't interfere with an existing file.
|
||||
i += 1
|
||||
new_hist_file = profile_dir / ("history.sqlite.new" + str(i))
|
||||
new_db = sqlite3.connect(new_hist_file)
|
||||
new_db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
|
||||
primary key autoincrement, start timestamp,
|
||||
end timestamp, num_cmds integer, remark text)""")
|
||||
new_db.execute("""CREATE TABLE IF NOT EXISTS history
|
||||
(session integer, line integer, source text, source_raw text,
|
||||
PRIMARY KEY (session, line))""")
|
||||
new_db.execute("""CREATE TABLE IF NOT EXISTS output_history
|
||||
(session integer, line integer, output text,
|
||||
PRIMARY KEY (session, line))""")
|
||||
new_db.commit()

        if inputs:
            with new_db:
                # Add the recent history into the new database.
                new_db.executemany('insert into sessions values (?,?,?,?,?)', sessions)
                new_db.executemany('insert into history values (?,?,?,?)', inputs)
                new_db.executemany('insert into output_history values (?,?,?)', outputs)
        new_db.close()

        if self.backup:
            i = 1
            backup_hist_file = profile_dir / ("history.sqlite.old.%d" % i)
            while backup_hist_file.exists():
                i += 1
                backup_hist_file = profile_dir / ("history.sqlite.old.%d" % i)
            hist_file.rename(backup_hist_file)
            print("Backed up longer history file to", backup_hist_file)
        else:
            hist_file.unlink()

        new_hist_file.rename(hist_file)
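        # The trimmed database has now replaced history.sqlite; the previous file
        # survives only as history.sqlite.old.<N> when --backup was given.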


class HistoryClear(HistoryTrim):
    description = clear_hist_help
    keep = Int(0, help="Number of recent lines to keep in the database.")

    force = Bool(False, help="Don't prompt user for confirmation").tag(config=True)

    flags = Dict(  # type: ignore
        dict(
            force=({"HistoryClear": {"force": True}}, force.help),
            f=({"HistoryTrim": {"force": True}}, force.help),
        )
    )
    aliases = Dict()  # type: ignore

    def start(self):
        if self.force or ask_yes_no(
            "Really delete all ipython history? ", default="no", interrupt="no"
        ):
            HistoryTrim.start(self)


class HistoryApp(Application):
    name = "ipython-history"
    description = "Manage the IPython history database."

    subcommands = Dict(dict(
        trim = (HistoryTrim, HistoryTrim.description.splitlines()[0]),
        clear = (HistoryClear, HistoryClear.description.splitlines()[0]),
    ))
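
    # Each entry maps a subcommand name to (Application subclass, short help);
    # the short help is simply the first line of that class's description string.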

    def start(self):
        if self.subapp is None:
            print(
                "No subcommand specified. Must specify one of: "
                + ", ".join(map(repr, self.subcommands))
                + ".\n"
            )
            self.print_description()
            self.print_subcommands()
            self.exit(1)
        else:
            return self.subapp.start()
173
.venv/lib/python3.12/site-packages/IPython/core/hooks.py
Normal file
173
.venv/lib/python3.12/site-packages/IPython/core/hooks.py
Normal file
@ -0,0 +1,173 @@
"""Hooks for IPython.

In Python, it is possible to overwrite any method of any object if you really
want to. But IPython exposes a few 'hooks', methods which are *designed* to
be overwritten by users for customization purposes. This module defines the
default versions of all such hooks, which get used by IPython if not
overridden by the user.

Hooks are simple functions, but they should be declared with ``self`` as their
first argument, because when activated they are registered into IPython as
instance methods. The self argument will be the IPython running instance
itself, so hooks have full access to the entire IPython object.

If you wish to define a new hook and activate it, you can make an :doc:`extension
</config/extensions/index>` or a :ref:`startup script <startup_files>`. For
example, you could use a startup file like this::

    import os

    def calljed(self,filename, linenum):
        "My editor hook calls the jed editor directly."
        print("Calling my own editor, jed ...")
        if os.system('jed +%d %s' % (linenum,filename)) != 0:
            raise TryNext()

    def load_ipython_extension(ip):
        ip.set_hook('editor', calljed)

"""

#*****************************************************************************
#       Copyright (C) 2005 Fernando Perez. <fperez@colorado.edu>
#
#  Distributed under the terms of the BSD License.  The full license is in
#  the file COPYING, distributed as part of this software.
#*****************************************************************************

import os
import subprocess
import sys

from .error import TryNext

# List here all the default hooks.  For now it's just the editor functions
# but over time we'll move here all the public API for user-accessible things.

__all__ = [
    "editor",
    "synchronize_with_editor",
    "show_in_pager",
    "pre_prompt_hook",
    "clipboard_get",
]

deprecated = {'pre_run_code_hook': "a callback for the 'pre_execute' or 'pre_run_cell' event",
              'late_startup_hook': "a callback for the 'shell_initialized' event",
              'shutdown_hook': "the atexit module",
              }


def editor(self, filename, linenum=None, wait=True):
    """Open the default editor at the given filename and linenumber.

    This is IPython's default editor hook, you can use it as an example to
    write your own modified one.  To set your own editor function as the
    new editor hook, call ip.set_hook('editor',yourfunc)."""

    # IPython configures a default editor at startup by reading $EDITOR from
    # the environment, and falling back on vi (unix) or notepad (win32).
    editor = self.editor

    # marker for at which line to open the file (for existing objects)
    if linenum is None or editor=='notepad':
        linemark = ''
    else:
        linemark = '+%d' % int(linenum)

    # Enclose in quotes if necessary and legal
    if ' ' in editor and os.path.isfile(editor) and editor[0] != '"':
        editor = '"%s"' % editor

    # Call the actual editor
    proc = subprocess.Popen('%s %s %s' % (editor, linemark, filename),
                            shell=True)
    if wait and proc.wait() != 0:
        raise TryNext()
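
# Overriding this hook (a sketch; `my_editor` is a placeholder name, and
# `get_ipython()` only exists inside a running IPython session):
#
#   def my_editor(self, filename, linenum=None, wait=True):
#       ...  # open `filename` at `linenum` with your editor of choice
#
#   get_ipython().set_hook('editor', my_editor)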


def synchronize_with_editor(self, filename, linenum, column):
    pass


class CommandChainDispatcher:
    """ Dispatch calls to a chain of commands until some func can handle it

    Usage: instantiate, execute "add" to add commands (with optional
    priority), execute normally via f() calling mechanism.

    """
    def __init__(self,commands=None):
        if commands is None:
            self.chain = []
        else:
            self.chain = commands

    def __call__(self,*args, **kw):
        """ Command chain is called just like normal func.

        This will call all funcs in chain with the same args as were given to
        this function, and return the result of first func that didn't raise
        TryNext"""
        last_exc = TryNext()
        for prio,cmd in self.chain:
            # print("prio",prio,"cmd",cmd) # dbg
            try:
                return cmd(*args, **kw)
            except TryNext as exc:
                last_exc = exc
        # if no function will accept it, raise TryNext up to the caller
        raise last_exc

    def __str__(self):
        return str(self.chain)

    def add(self, func, priority=0):
        """ Add a func to the cmd chain with given priority """
        self.chain.append((priority, func))
        self.chain.sort(key=lambda x: x[0])

    def __iter__(self):
        """ Return all objects in chain.

        Handy if the objects are not callable.
        """
        return iter(self.chain)
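
# Typical use (a sketch; `primary` and `fallback` stand for any callables that
# raise TryNext when they cannot handle the call):
#
#   dispatcher = CommandChainDispatcher()
#   dispatcher.add(fallback, priority=10)
#   dispatcher.add(primary, priority=0)   # lower priority values run first
#   result = dispatcher("some argument")  # first non-TryNext result is returned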


def show_in_pager(self, data, start, screen_lines):
    """ Run a string through pager """
    # raising TryNext here will use the default paging functionality
    raise TryNext


def pre_prompt_hook(self):
    """ Run before displaying the next prompt

    Use this e.g. to display output from asynchronous operations (in order
    to not mess up text entry)
    """

    return None


def clipboard_get(self):
    """ Get text from the clipboard.
    """
    from ..lib.clipboard import (
        osx_clipboard_get,
        tkinter_clipboard_get,
        win32_clipboard_get,
        wayland_clipboard_get,
    )
    if sys.platform == 'win32':
        chain = [win32_clipboard_get, tkinter_clipboard_get]
    elif sys.platform == 'darwin':
        chain = [osx_clipboard_get, tkinter_clipboard_get]
    else:
        chain = [wayland_clipboard_get, tkinter_clipboard_get]
    dispatcher = CommandChainDispatcher()
    for func in chain:
        dispatcher.add(func)
    text = dispatcher()
    return text