fixed subscription table
47
.venv/lib/python3.12/site-packages/jedi/plugins/__init__.py
Normal file
@@ -0,0 +1,47 @@
from functools import wraps


class _PluginManager:
    def __init__(self):
        self._registered_plugins = []
        self._cached_base_callbacks = {}
        self._built_functions = {}

    def register(self, *plugins):
        """
        Makes it possible to register your plugin.
        """
        self._registered_plugins.extend(plugins)
        self._build_functions()

    def decorate(self, name=None):
        def decorator(callback):
            @wraps(callback)
            def wrapper(*args, **kwargs):
                return built_functions[public_name](*args, **kwargs)

            public_name = name or callback.__name__

            assert public_name not in self._built_functions
            built_functions = self._built_functions
            built_functions[public_name] = callback
            self._cached_base_callbacks[public_name] = callback

            return wrapper

        return decorator

    def _build_functions(self):
        for name, callback in self._cached_base_callbacks.items():
            for plugin in reversed(self._registered_plugins):
                # Need to reverse so the first plugin is run first.
                try:
                    func = getattr(plugin, name)
                except AttributeError:
                    pass
                else:
                    callback = func(callback)
            self._built_functions[name] = callback


plugin_manager = _PluginManager()
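The manager above is the whole plugin mechanism: jedi decorates its own base functions with plugin_manager.decorate(), and each registered plugin module may expose a function of the same name that wraps the previous callback. A minimal usage sketch, assuming the vendored jedi package is importable from this venv; the demo plugin and the tracing wrapper are invented for illustration and are not part of jedi:

from types import SimpleNamespace

from jedi.plugins import _PluginManager

manager = _PluginManager()


@manager.decorate()
def execute(value):
    # Base callback; a plugin can wrap it by exposing a function of the same name.
    return 'base(%s)' % value


def tracing_execute(callback):
    # Plugin hook: receives the previous callback and returns a replacement.
    def wrapper(value):
        return 'traced(%s)' % callback(value)
    return wrapper


demo_plugin = SimpleNamespace(execute=tracing_execute)
manager.register(demo_plugin)
print(execute('x'))  # -> traced(base(x))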
296
.venv/lib/python3.12/site-packages/jedi/plugins/django.py
Normal file
@@ -0,0 +1,296 @@
"""
Module is used to infer Django model fields.
"""
from inspect import Parameter

from jedi import debug
from jedi.inference.cache import inference_state_function_cache
from jedi.inference.base_value import ValueSet, iterator_to_value_set, ValueWrapper
from jedi.inference.filters import DictFilter, AttributeOverwrite
from jedi.inference.names import NameWrapper, BaseTreeParamName
from jedi.inference.compiled.value import EmptyCompiledName
from jedi.inference.value.instance import TreeInstance
from jedi.inference.value.klass import ClassMixin
from jedi.inference.gradual.base import GenericClass
from jedi.inference.gradual.generics import TupleGenericManager
from jedi.inference.signature import AbstractSignature


mapping = {
    'IntegerField': (None, 'int'),
    'BigIntegerField': (None, 'int'),
    'PositiveIntegerField': (None, 'int'),
    'SmallIntegerField': (None, 'int'),
    'CharField': (None, 'str'),
    'TextField': (None, 'str'),
    'EmailField': (None, 'str'),
    'GenericIPAddressField': (None, 'str'),
    'URLField': (None, 'str'),
    'FloatField': (None, 'float'),
    'BinaryField': (None, 'bytes'),
    'BooleanField': (None, 'bool'),
    'DecimalField': ('decimal', 'Decimal'),
    'TimeField': ('datetime', 'time'),
    'DurationField': ('datetime', 'timedelta'),
    'DateField': ('datetime', 'date'),
    'DateTimeField': ('datetime', 'datetime'),
    'UUIDField': ('uuid', 'UUID'),
}

_FILTER_LIKE_METHODS = ('create', 'filter', 'exclude', 'update', 'get',
                        'get_or_create', 'update_or_create')


@inference_state_function_cache()
def _get_deferred_attributes(inference_state):
    return inference_state.import_module(
        ('django', 'db', 'models', 'query_utils')
    ).py__getattribute__('DeferredAttribute').execute_annotation()


def _infer_scalar_field(inference_state, field_name, field_tree_instance, is_instance):
    try:
        module_name, attribute_name = mapping[field_tree_instance.py__name__()]
    except KeyError:
        return None

    if not is_instance:
        return _get_deferred_attributes(inference_state)

    if module_name is None:
        module = inference_state.builtins_module
    else:
        module = inference_state.import_module((module_name,))

    for attribute in module.py__getattribute__(attribute_name):
        return attribute.execute_with_values()


@iterator_to_value_set
def _get_foreign_key_values(cls, field_tree_instance):
    if isinstance(field_tree_instance, TreeInstance):
        # TODO private access..
        argument_iterator = field_tree_instance._arguments.unpack()
        key, lazy_values = next(argument_iterator, (None, None))
        if key is None and lazy_values is not None:
            for value in lazy_values.infer():
                if value.py__name__() == 'str':
                    foreign_key_class_name = value.get_safe_value()
                    module = cls.get_root_context()
                    for v in module.py__getattribute__(foreign_key_class_name):
                        if v.is_class():
                            yield v
                elif value.is_class():
                    yield value


def _infer_field(cls, field_name, is_instance):
    inference_state = cls.inference_state
    result = field_name.infer()
    for field_tree_instance in result:
        scalar_field = _infer_scalar_field(
            inference_state, field_name, field_tree_instance, is_instance)
        if scalar_field is not None:
            return scalar_field

        name = field_tree_instance.py__name__()
        is_many_to_many = name == 'ManyToManyField'
        if name in ('ForeignKey', 'OneToOneField') or is_many_to_many:
            if not is_instance:
                return _get_deferred_attributes(inference_state)

            values = _get_foreign_key_values(cls, field_tree_instance)
            if is_many_to_many:
                return ValueSet(filter(None, [
                    _create_manager_for(v, 'RelatedManager') for v in values
                ]))
            else:
                return values.execute_with_values()

    debug.dbg('django plugin: fail to infer `%s` from class `%s`',
              field_name.string_name, cls.py__name__())
    return result


class DjangoModelName(NameWrapper):
    def __init__(self, cls, name, is_instance):
        super().__init__(name)
        self._cls = cls
        self._is_instance = is_instance

    def infer(self):
        return _infer_field(self._cls, self._wrapped_name, self._is_instance)


def _create_manager_for(cls, manager_cls='BaseManager'):
    managers = cls.inference_state.import_module(
        ('django', 'db', 'models', 'manager')
    ).py__getattribute__(manager_cls)
    for m in managers:
        if m.is_class_mixin():
            generics_manager = TupleGenericManager((ValueSet([cls]),))
            for c in GenericClass(m, generics_manager).execute_annotation():
                return c
    return None


def _new_dict_filter(cls, is_instance):
    filters = list(cls.get_filters(
        is_instance=is_instance,
        include_metaclasses=False,
        include_type_when_class=False)
    )
    dct = {
        name.string_name: DjangoModelName(cls, name, is_instance)
        for filter_ in reversed(filters)
        for name in filter_.values()
    }
    if is_instance:
        # Replace the objects with a name that amounts to nothing when accessed
        # in an instance. This is not perfect and still completes "objects" in
        # that case, but it at least not inferes stuff like `.objects.filter`.
        # It would be nicer to do that in a better way, so that it also doesn't
        # show up in completions, but it's probably just not worth doing that
        # for the extra amount of work.
        dct['objects'] = EmptyCompiledName(cls.inference_state, 'objects')

    return DictFilter(dct)


def is_django_model_base(value):
    return value.py__name__() == 'ModelBase' \
        and value.get_root_context().py__name__() == 'django.db.models.base'


def get_metaclass_filters(func):
    def wrapper(cls, metaclasses, is_instance):
        for metaclass in metaclasses:
            if is_django_model_base(metaclass):
                return [_new_dict_filter(cls, is_instance)]

        return func(cls, metaclasses, is_instance)
    return wrapper


def tree_name_to_values(func):
    def wrapper(inference_state, context, tree_name):
        result = func(inference_state, context, tree_name)
        if tree_name.value in _FILTER_LIKE_METHODS:
            # Here we try to overwrite stuff like User.objects.filter. We need
            # this to make sure that keyword param completion works on these
            # kind of methods.
            for v in result:
                if v.get_qualified_names() == ('_BaseQuerySet', tree_name.value) \
                        and v.parent_context.is_module() \
                        and v.parent_context.py__name__() == 'django.db.models.query':
                    qs = context.get_value()
                    generics = qs.get_generics()
                    if len(generics) >= 1:
                        return ValueSet(QuerySetMethodWrapper(v, model)
                                        for model in generics[0])

        elif tree_name.value == 'BaseManager' and context.is_module() \
                and context.py__name__() == 'django.db.models.manager':
            return ValueSet(ManagerWrapper(r) for r in result)

        elif tree_name.value == 'Field' and context.is_module() \
                and context.py__name__() == 'django.db.models.fields':
            return ValueSet(FieldWrapper(r) for r in result)
        return result
    return wrapper


def _find_fields(cls):
    for name in _new_dict_filter(cls, is_instance=False).values():
        for value in name.infer():
            if value.name.get_qualified_names(include_module_names=True) \
                    == ('django', 'db', 'models', 'query_utils', 'DeferredAttribute'):
                yield name


def _get_signatures(cls):
    return [DjangoModelSignature(cls, field_names=list(_find_fields(cls)))]


def get_metaclass_signatures(func):
    def wrapper(cls, metaclasses):
        for metaclass in metaclasses:
            if is_django_model_base(metaclass):
                return _get_signatures(cls)
        return func(cls, metaclass)
    return wrapper


class ManagerWrapper(ValueWrapper):
    def py__getitem__(self, index_value_set, contextualized_node):
        return ValueSet(
            GenericManagerWrapper(generic)
            for generic in self._wrapped_value.py__getitem__(
                index_value_set, contextualized_node)
        )


class GenericManagerWrapper(AttributeOverwrite, ClassMixin):
    def py__get__on_class(self, calling_instance, instance, class_value):
        return calling_instance.class_value.with_generics(
            (ValueSet({class_value}),)
        ).py__call__(calling_instance._arguments)

    def with_generics(self, generics_tuple):
        return self._wrapped_value.with_generics(generics_tuple)


class FieldWrapper(ValueWrapper):
    def py__getitem__(self, index_value_set, contextualized_node):
        return ValueSet(
            GenericFieldWrapper(generic)
            for generic in self._wrapped_value.py__getitem__(
                index_value_set, contextualized_node)
        )


class GenericFieldWrapper(AttributeOverwrite, ClassMixin):
    def py__get__on_class(self, calling_instance, instance, class_value):
        # This is mostly an optimization to avoid Jedi aborting inference,
        # because of too many function executions of Field.__get__.
        return ValueSet({calling_instance})


class DjangoModelSignature(AbstractSignature):
    def __init__(self, value, field_names):
        super().__init__(value)
        self._field_names = field_names

    def get_param_names(self, resolve_stars=False):
        return [DjangoParamName(name) for name in self._field_names]


class DjangoParamName(BaseTreeParamName):
    def __init__(self, field_name):
        super().__init__(field_name.parent_context, field_name.tree_name)
        self._field_name = field_name

    def get_kind(self):
        return Parameter.KEYWORD_ONLY

    def infer(self):
        return self._field_name.infer()


class QuerySetMethodWrapper(ValueWrapper):
    def __init__(self, method, model_cls):
        super().__init__(method)
        self._model_cls = model_cls

    def py__get__(self, instance, class_value):
        return ValueSet({QuerySetBoundMethodWrapper(v, self._model_cls)
                         for v in self._wrapped_value.py__get__(instance, class_value)})


class QuerySetBoundMethodWrapper(ValueWrapper):
    def __init__(self, method, model_cls):
        super().__init__(method)
        self._model_cls = model_cls

    def get_signatures(self):
        return _get_signatures(self._model_cls)
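The heart of this plugin is the `mapping` table at the top of the file: scalar Django field classes are resolved to plain Python types so that attribute access on model instances completes sensibly. A small sketch that only reads the table, assuming the vendored jedi package is importable; the chosen field names are arbitrary examples and Django itself is not needed to inspect it:

from jedi.plugins.django import mapping

for field_cls in ('CharField', 'DecimalField', 'DateTimeField'):
    module_name, attribute_name = mapping[field_cls]
    # A module of None means the inferred type lives in builtins.
    inferred = attribute_name if module_name is None else '%s.%s' % (module_name, attribute_name)
    print('%s -> %s' % (field_cls, inferred))
# CharField -> str
# DecimalField -> decimal.Decimal
# DateTimeField -> datetime.datetime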
21
.venv/lib/python3.12/site-packages/jedi/plugins/flask.py
Normal file
@@ -0,0 +1,21 @@
def import_module(callback):
    """
    Handle "magic" Flask extension imports:
    ``flask.ext.foo`` is really ``flask_foo`` or ``flaskext.foo``.
    """
    def wrapper(inference_state, import_names, module_context, *args, **kwargs):
        if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
            # New style.
            ipath = ('flask_' + import_names[2]),
            value_set = callback(inference_state, ipath, None, *args, **kwargs)
            if value_set:
                return value_set
            value_set = callback(inference_state, ('flaskext',), None, *args, **kwargs)
            return callback(
                inference_state,
                ('flaskext', import_names[2]),
                next(iter(value_set)),
                *args, **kwargs
            )
        return callback(inference_state, import_names, module_context, *args, **kwargs)
    return wrapper
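The wrapper only rewrites the import path; everything else is delegated to the wrapped callback. A sketch of the rewriting it performs, assuming the vendored jedi package is importable; the recording callback and the `sqlalchemy` extension name are invented for illustration:

from jedi.plugins.flask import import_module

attempts = []


def fake_import(inference_state, import_names, module_context, *args, **kwargs):
    # Record each import path the wrapper tries; pretend only `flaskext` resolves.
    attempts.append(import_names)
    return {'<flaskext module>'} if import_names == ('flaskext',) else set()


wrapped = import_module(fake_import)
wrapped(None, ('flask', 'ext', 'sqlalchemy'), None)
print(attempts)
# [('flask_sqlalchemy',), ('flaskext',), ('flaskext', 'sqlalchemy')]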
269
.venv/lib/python3.12/site-packages/jedi/plugins/pytest.py
Normal file
@@ -0,0 +1,269 @@
import sys
from typing import List
from pathlib import Path

from parso.tree import search_ancestor
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.imports import goto_import, load_module_from_path
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.base_value import NO_VALUES, ValueSet
from jedi.inference.helpers import infer_call_of_leaf

_PYTEST_FIXTURE_MODULES = [
    ('_pytest', 'monkeypatch'),
    ('_pytest', 'capture'),
    ('_pytest', 'logging'),
    ('_pytest', 'tmpdir'),
    ('_pytest', 'pytester'),
]


def execute(callback):
    def wrapper(value, arguments):
        # This might not be necessary anymore in pytest 4/5, definitely needed
        # for pytest 3.
        if value.py__name__() == 'fixture' \
                and value.parent_context.py__name__() == '_pytest.fixtures':
            return NO_VALUES

        return callback(value, arguments)
    return wrapper


def infer_anonymous_param(func):
    def get_returns(value):
        if value.tree_node.annotation is not None:
            result = value.execute_with_values()
            if any(v.name.get_qualified_names(include_module_names=True)
                   == ('typing', 'Generator')
                   for v in result):
                return ValueSet.from_sets(
                    v.py__getattribute__('__next__').execute_annotation()
                    for v in result
                )
            return result

        # In pytest we need to differentiate between generators and normal
        # returns.
        # Parameters still need to be anonymous, .as_context() ensures that.
        function_context = value.as_context()
        if function_context.is_generator():
            return function_context.merge_yield_values()
        else:
            return function_context.get_return_values()

    def wrapper(param_name):
        # parameters with an annotation do not need special handling
        if param_name.annotation_node:
            return func(param_name)
        is_pytest_param, param_name_is_function_name = \
            _is_a_pytest_param_and_inherited(param_name)
        if is_pytest_param:
            module = param_name.get_root_context()
            fixtures = _goto_pytest_fixture(
                module,
                param_name.string_name,
                # This skips the current module, because we are basically
                # inheriting a fixture from somewhere else.
                skip_own_module=param_name_is_function_name,
            )
            if fixtures:
                return ValueSet.from_sets(
                    get_returns(value)
                    for fixture in fixtures
                    for value in fixture.infer()
                )
        return func(param_name)
    return wrapper


def goto_anonymous_param(func):
    def wrapper(param_name):
        is_pytest_param, param_name_is_function_name = \
            _is_a_pytest_param_and_inherited(param_name)
        if is_pytest_param:
            names = _goto_pytest_fixture(
                param_name.get_root_context(),
                param_name.string_name,
                skip_own_module=param_name_is_function_name,
            )
            if names:
                return names
        return func(param_name)
    return wrapper


def complete_param_names(func):
    def wrapper(context, func_name, decorator_nodes):
        module_context = context.get_root_context()
        if _is_pytest_func(func_name, decorator_nodes):
            names = []
            for module_context in _iter_pytest_modules(module_context):
                names += FixtureFilter(module_context).values()
            if names:
                return names
        return func(context, func_name, decorator_nodes)
    return wrapper


def _goto_pytest_fixture(module_context, name, skip_own_module):
    for module_context in _iter_pytest_modules(module_context, skip_own_module=skip_own_module):
        names = FixtureFilter(module_context).get(name)
        if names:
            return names


def _is_a_pytest_param_and_inherited(param_name):
    """
    Pytest params are either in a `test_*` function or have a pytest fixture
    with the decorator @pytest.fixture.

    This is a heuristic and will work in most cases.
    """
    funcdef = search_ancestor(param_name.tree_name, 'funcdef')
    if funcdef is None:  # A lambda
        return False, False
    decorators = funcdef.get_decorators()
    return _is_pytest_func(funcdef.name.value, decorators), \
        funcdef.name.value == param_name.string_name


def _is_pytest_func(func_name, decorator_nodes):
    return func_name.startswith('test') \
        or any('fixture' in n.get_code() for n in decorator_nodes)


def _find_pytest_plugin_modules() -> List[List[str]]:
    """
    Finds pytest plugin modules hooked by setuptools entry points

    See https://docs.pytest.org/en/stable/how-to/writing_plugins.html#setuptools-entry-points
    """
    if sys.version_info >= (3, 8):
        from importlib.metadata import entry_points

        if sys.version_info >= (3, 10):
            pytest_entry_points = entry_points(group="pytest11")
        else:
            pytest_entry_points = entry_points().get("pytest11", ())

        if sys.version_info >= (3, 9):
            return [ep.module.split(".") for ep in pytest_entry_points]
        else:
            # Python 3.8 doesn't have `EntryPoint.module`. Implement equivalent
            # to what Python 3.9 does (with additional None check to placate `mypy`)
            matches = [
                ep.pattern.match(ep.value)
                for ep in pytest_entry_points
            ]
            return [x.group('module').split(".") for x in matches if x]

    else:
        from pkg_resources import iter_entry_points
        return [ep.module_name.split(".") for ep in iter_entry_points(group="pytest11")]


@inference_state_method_cache()
def _iter_pytest_modules(module_context, skip_own_module=False):
    if not skip_own_module:
        yield module_context

    file_io = module_context.get_value().file_io
    if file_io is not None:
        folder = file_io.get_parent_folder()
        sys_path = module_context.inference_state.get_sys_path()

        # prevent an infinite loop when reaching the root of the current drive
        last_folder = None

        while any(folder.path.startswith(p) for p in sys_path):
            file_io = folder.get_file_io('conftest.py')
            if Path(file_io.path) != module_context.py__file__():
                try:
                    m = load_module_from_path(module_context.inference_state, file_io)
                    conftest_module = m.as_context()
                    yield conftest_module

                    plugins_list = m.tree_node.get_used_names().get("pytest_plugins")
                    if plugins_list:
                        name = conftest_module.create_name(plugins_list[0])
                        yield from _load_pytest_plugins(module_context, name)
                except FileNotFoundError:
                    pass
            folder = folder.get_parent_folder()

            # prevent an infinite for loop if the same parent folder is return twice
            if last_folder is not None and folder.path == last_folder.path:
                break
            last_folder = folder  # keep track of the last found parent name

    for names in _PYTEST_FIXTURE_MODULES + _find_pytest_plugin_modules():
        for module_value in module_context.inference_state.import_module(names):
            yield module_value.as_context()


def _load_pytest_plugins(module_context, name):
    from jedi.inference.helpers import get_str_or_none

    for inferred in name.infer():
        for seq_value in inferred.py__iter__():
            for value in seq_value.infer():
                fq_name = get_str_or_none(value)
                if fq_name:
                    names = fq_name.split(".")
                    for module_value in module_context.inference_state.import_module(names):
                        yield module_value.as_context()


class FixtureFilter(ParserTreeFilter):
    def _filter(self, names):
        for name in super()._filter(names):
            # look for fixture definitions of imported names
            if name.parent.type == "import_from":
                imported_names = goto_import(self.parent_context, name)
                if any(
                    self._is_fixture(iname.parent_context, iname.tree_name)
                    for iname in imported_names
                    # discard imports of whole modules, that have no tree_name
                    if iname.tree_name
                ):
                    yield name

            elif self._is_fixture(self.parent_context, name):
                yield name

    def _is_fixture(self, context, name):
        funcdef = name.parent
        # Class fixtures are not supported
        if funcdef.type != "funcdef":
            return False
        decorated = funcdef.parent
        if decorated.type != "decorated":
            return False
        decorators = decorated.children[0]
        if decorators.type == 'decorators':
            decorators = decorators.children
        else:
            decorators = [decorators]
        for decorator in decorators:
            dotted_name = decorator.children[1]
            # A heuristic, this makes it faster.
            if 'fixture' in dotted_name.get_code():
                if dotted_name.type == 'atom_expr':
                    # Since Python3.9 a decorator does not have dotted names
                    # anymore.
                    last_trailer = dotted_name.children[-1]
                    last_leaf = last_trailer.get_last_leaf()
                    if last_leaf == ')':
                        values = infer_call_of_leaf(
                            context, last_leaf, cut_own_trailer=True
                        )
                    else:
                        values = context.infer_node(dotted_name)
                else:
                    values = context.infer_node(dotted_name)
                for value in values:
                    if value.name.get_qualified_names(include_module_names=True) \
                            == ('_pytest', 'fixtures', 'fixture'):
                        return True
        return False
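Most of this module is plumbing around two small heuristics: a parameter is treated as a fixture request if its function looks like a test or is itself a fixture. A quick check of the `_is_pytest_func` heuristic, assuming the vendored jedi package is importable; the stand-in decorator object is invented and only provides the `get_code()` method the heuristic reads:

from jedi.plugins.pytest import _is_pytest_func


class FakeDecorator:
    def __init__(self, code):
        self._code = code

    def get_code(self):
        return self._code


print(_is_pytest_func('test_login', []))                                 # True: name starts with "test"
print(_is_pytest_func('db', [FakeDecorator('@pytest.fixture\n')]))       # True: fixture decorator
print(_is_pytest_func('helper', [FakeDecorator('@functools.cache\n')]))  # False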
12
.venv/lib/python3.12/site-packages/jedi/plugins/registry.py
Normal file
@@ -0,0 +1,12 @@
"""
This is not a plugin, this is just the place were plugins are registered.
"""

from jedi.plugins import stdlib
from jedi.plugins import flask
from jedi.plugins import pytest
from jedi.plugins import django
from jedi.plugins import plugin_manager


plugin_manager.register(stdlib, flask, pytest, django)
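Importing this module is what actually wires the four plugin modules into the shared manager. A quick way to observe that, assuming the vendored jedi package is importable; peeking at the private `_registered_plugins` list is for illustration only:

from jedi.plugins import plugin_manager
from jedi.plugins import registry  # noqa: F401  (imported for its side effect)

print([p.__name__ for p in plugin_manager._registered_plugins])
# ['jedi.plugins.stdlib', 'jedi.plugins.flask', 'jedi.plugins.pytest', 'jedi.plugins.django']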
879
.venv/lib/python3.12/site-packages/jedi/plugins/stdlib.py
Normal file
@@ -0,0 +1,879 @@
"""
Implementations of standard library functions, because it's not possible to
understand them with Jedi.

To add a new implementation, create a function and add it to the
``_implemented`` dict at the bottom of this module.

Note that this module exists only to implement very specific functionality in
the standard library. The usual way to understand the standard library is the
compiled module that returns the types for C-builtins.
"""
import parso
import os
from inspect import Parameter

from jedi import debug
from jedi.inference.utils import safe_property
from jedi.inference.helpers import get_str_or_none
from jedi.inference.arguments import iterate_argument_clinic, ParamIssue, \
    repack_with_argument_clinic, AbstractArguments, TreeArgumentsWrapper
from jedi.inference import analysis
from jedi.inference import compiled
from jedi.inference.value.instance import \
    AnonymousMethodExecutionContext, MethodExecutionContext
from jedi.inference.base_value import ContextualizedNode, \
    NO_VALUES, ValueSet, ValueWrapper, LazyValueWrapper
from jedi.inference.value import ClassValue, ModuleValue
from jedi.inference.value.klass import ClassMixin
from jedi.inference.value.function import FunctionMixin
from jedi.inference.value import iterable
from jedi.inference.lazy_value import LazyTreeValue, LazyKnownValue, \
    LazyKnownValues
from jedi.inference.names import ValueName, BaseTreeParamName
from jedi.inference.filters import AttributeOverwrite, publish_method, \
    ParserTreeFilter, DictFilter
from jedi.inference.signature import AbstractSignature, SignatureWrapper


# Copied from Python 3.6's stdlib.
_NAMEDTUPLE_CLASS_TEMPLATE = """\
_property = property
_tuple = tuple
from operator import itemgetter as _itemgetter
from collections import OrderedDict

class {typename}(tuple):
    __slots__ = ()

    _fields = {field_names!r}

    def __new__(_cls, {arg_list}):
        'Create new instance of {typename}({arg_list})'
        return _tuple.__new__(_cls, ({arg_list}))

    @classmethod
    def _make(cls, iterable, new=tuple.__new__, len=len):
        'Make a new {typename} object from a sequence or iterable'
        result = new(cls, iterable)
        if len(result) != {num_fields:d}:
            raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result))
        return result

    def _replace(_self, **kwds):
        'Return a new {typename} object replacing specified fields with new values'
        result = _self._make(map(kwds.pop, {field_names!r}, _self))
        if kwds:
            raise ValueError('Got unexpected field names: %r' % list(kwds))
        return result

    def __repr__(self):
        'Return a nicely formatted representation string'
        return self.__class__.__name__ + '({repr_fmt})' % self

    def _asdict(self):
        'Return a new OrderedDict which maps field names to their values.'
        return OrderedDict(zip(self._fields, self))

    def __getnewargs__(self):
        'Return self as a plain tuple. Used by copy and pickle.'
        return tuple(self)

    # These methods were added by Jedi.
    # __new__ doesn't really work with Jedi. So adding this to nametuples seems
    # like the easiest way.
    def __init__(self, {arg_list}):
        'A helper function for namedtuple.'
        self.__iterable = ({arg_list})

    def __iter__(self):
        for i in self.__iterable:
            yield i

    def __getitem__(self, y):
        return self.__iterable[y]

{field_defs}
"""

_NAMEDTUPLE_FIELD_TEMPLATE = '''\
    {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}')
'''


def execute(callback):
    def wrapper(value, arguments):
        def call():
            return callback(value, arguments=arguments)

        try:
            obj_name = value.name.string_name
        except AttributeError:
            pass
        else:
            p = value.parent_context
            if p is not None and p.is_builtins_module():
                module_name = 'builtins'
            elif p is not None and p.is_module():
                module_name = p.py__name__()
            else:
                return call()

            if value.is_bound_method() or value.is_instance():
                # value can be an instance for example if it is a partial
                # object.
                return call()

            # for now we just support builtin functions.
            try:
                func = _implemented[module_name][obj_name]
            except KeyError:
                pass
            else:
                return func(value, arguments=arguments, callback=call)
        return call()

    return wrapper


def _follow_param(inference_state, arguments, index):
    try:
        key, lazy_value = list(arguments.unpack())[index]
    except IndexError:
        return NO_VALUES
    else:
        return lazy_value.infer()


def argument_clinic(clinic_string, want_value=False, want_context=False,
                    want_arguments=False, want_inference_state=False,
                    want_callback=False):
    """
    Works like Argument Clinic (PEP 436), to validate function params.
    """

    def f(func):
        def wrapper(value, arguments, callback):
            try:
                args = tuple(iterate_argument_clinic(
                    value.inference_state, arguments, clinic_string))
            except ParamIssue:
                return NO_VALUES

            debug.dbg('builtin start %s' % value, color='MAGENTA')
            kwargs = {}
            if want_context:
                kwargs['context'] = arguments.context
            if want_value:
                kwargs['value'] = value
            if want_inference_state:
                kwargs['inference_state'] = value.inference_state
            if want_arguments:
                kwargs['arguments'] = arguments
            if want_callback:
                kwargs['callback'] = callback
            result = func(*args, **kwargs)
            debug.dbg('builtin end: %s', result, color='MAGENTA')
            return result

        return wrapper
    return f


@argument_clinic('iterator[, default], /', want_inference_state=True)
def builtins_next(iterators, defaults, inference_state):
    # TODO theoretically we have to check here if something is an iterator.
    # That is probably done by checking if it's not a class.
    return defaults | iterators.py__getattribute__('__next__').execute_with_values()


@argument_clinic('iterator[, default], /')
def builtins_iter(iterators_or_callables, defaults):
    # TODO implement this if it's a callable.
    return iterators_or_callables.py__getattribute__('__iter__').execute_with_values()


@argument_clinic('object, name[, default], /')
def builtins_getattr(objects, names, defaults=None):
    # follow the first param
    for value in objects:
        for name in names:
            string = get_str_or_none(name)
            if string is None:
                debug.warning('getattr called without str')
                continue
            else:
                return value.py__getattribute__(string)
    return NO_VALUES


@argument_clinic('object[, bases, dict], /')
def builtins_type(objects, bases, dicts):
    if bases or dicts:
        # It's a type creation... maybe someday...
        return NO_VALUES
    else:
        return objects.py__class__()


class SuperInstance(LazyValueWrapper):
    """To be used like the object ``super`` returns."""
    def __init__(self, inference_state, instance):
        self.inference_state = inference_state
        self._instance = instance  # Corresponds to super().__self__

    def _get_bases(self):
        return self._instance.py__class__().py__bases__()

    def _get_wrapped_value(self):
        objs = self._get_bases()[0].infer().execute_with_values()
        if not objs:
            # This is just a fallback and will only be used, if it's not
            # possible to find a class
            return self._instance
        return next(iter(objs))

    def get_filters(self, origin_scope=None):
        for b in self._get_bases():
            for value in b.infer().execute_with_values():
                for f in value.get_filters():
                    yield f


@argument_clinic('[type[, value]], /', want_context=True)
def builtins_super(types, objects, context):
    instance = None
    if isinstance(context, AnonymousMethodExecutionContext):
        instance = context.instance
    elif isinstance(context, MethodExecutionContext):
        instance = context.instance
    if instance is None:
        return NO_VALUES
    return ValueSet({SuperInstance(instance.inference_state, instance)})


class ReversedObject(AttributeOverwrite):
    def __init__(self, reversed_obj, iter_list):
        super().__init__(reversed_obj)
        self._iter_list = iter_list

    def py__iter__(self, contextualized_node=None):
        return self._iter_list

    @publish_method('__next__')
    def _next(self, arguments):
        return ValueSet.from_sets(
            lazy_value.infer() for lazy_value in self._iter_list
        )


@argument_clinic('sequence, /', want_value=True, want_arguments=True)
def builtins_reversed(sequences, value, arguments):
    # While we could do without this variable (just by using sequences), we
    # want static analysis to work well. Therefore we need to generated the
    # values again.
    key, lazy_value = next(arguments.unpack())
    cn = None
    if isinstance(lazy_value, LazyTreeValue):
        cn = ContextualizedNode(lazy_value.context, lazy_value.data)
    ordered = list(sequences.iterate(cn))

    # Repack iterator values and then run it the normal way. This is
    # necessary, because `reversed` is a function and autocompletion
    # would fail in certain cases like `reversed(x).__iter__` if we
    # just returned the result directly.
    seq, = value.inference_state.typing_module.py__getattribute__('Iterator').execute_with_values()
    return ValueSet([ReversedObject(seq, list(reversed(ordered)))])


@argument_clinic('value, type, /', want_arguments=True, want_inference_state=True)
def builtins_isinstance(objects, types, arguments, inference_state):
    bool_results = set()
    for o in objects:
        cls = o.py__class__()
        try:
            cls.py__bases__
        except AttributeError:
            # This is temporary. Everything should have a class attribute in
            # Python?! Maybe we'll leave it here, because some numpy objects or
            # whatever might not.
            bool_results = set([True, False])
            break

        mro = list(cls.py__mro__())

        for cls_or_tup in types:
            if cls_or_tup.is_class():
                bool_results.add(cls_or_tup in mro)
            elif cls_or_tup.name.string_name == 'tuple' \
                    and cls_or_tup.get_root_context().is_builtins_module():
                # Check for tuples.
                classes = ValueSet.from_sets(
                    lazy_value.infer()
                    for lazy_value in cls_or_tup.iterate()
                )
                bool_results.add(any(cls in mro for cls in classes))
            else:
                _, lazy_value = list(arguments.unpack())[1]
                if isinstance(lazy_value, LazyTreeValue):
                    node = lazy_value.data
                    message = 'TypeError: isinstance() arg 2 must be a ' \
                              'class, type, or tuple of classes and types, ' \
                              'not %s.' % cls_or_tup
                    analysis.add(lazy_value.context, 'type-error-isinstance', node, message)

    return ValueSet(
        compiled.builtin_from_name(inference_state, str(b))
        for b in bool_results
    )


class StaticMethodObject(ValueWrapper):
    def py__get__(self, instance, class_value):
        return ValueSet([self._wrapped_value])


@argument_clinic('sequence, /')
def builtins_staticmethod(functions):
    return ValueSet(StaticMethodObject(f) for f in functions)


class ClassMethodObject(ValueWrapper):
    def __init__(self, class_method_obj, function):
        super().__init__(class_method_obj)
        self._function = function

    def py__get__(self, instance, class_value):
        return ValueSet([
            ClassMethodGet(__get__, class_value, self._function)
            for __get__ in self._wrapped_value.py__getattribute__('__get__')
        ])


class ClassMethodGet(ValueWrapper):
    def __init__(self, get_method, klass, function):
        super().__init__(get_method)
        self._class = klass
        self._function = function

    def get_signatures(self):
        return [sig.bind(self._function) for sig in self._function.get_signatures()]

    def py__call__(self, arguments):
        return self._function.execute(ClassMethodArguments(self._class, arguments))


class ClassMethodArguments(TreeArgumentsWrapper):
    def __init__(self, klass, arguments):
        super().__init__(arguments)
        self._class = klass

    def unpack(self, func=None):
        yield None, LazyKnownValue(self._class)
        for values in self._wrapped_arguments.unpack(func):
            yield values


@argument_clinic('sequence, /', want_value=True, want_arguments=True)
def builtins_classmethod(functions, value, arguments):
    return ValueSet(
        ClassMethodObject(class_method_object, function)
        for class_method_object in value.py__call__(arguments=arguments)
        for function in functions
    )


class PropertyObject(AttributeOverwrite, ValueWrapper):
    api_type = 'property'

    def __init__(self, property_obj, function):
        super().__init__(property_obj)
        self._function = function

    def py__get__(self, instance, class_value):
        if instance is None:
            return ValueSet([self])
        return self._function.execute_with_values(instance)

    @publish_method('deleter')
    @publish_method('getter')
    @publish_method('setter')
    def _return_self(self, arguments):
        return ValueSet({self})


@argument_clinic('func, /', want_callback=True)
def builtins_property(functions, callback):
    return ValueSet(
        PropertyObject(property_value, function)
        for property_value in callback()
        for function in functions
    )


def collections_namedtuple(value, arguments, callback):
    """
    Implementation of the namedtuple function.

    This has to be done by processing the namedtuple class template and
    inferring the result.

    """
    inference_state = value.inference_state

    # Process arguments
    name = 'jedi_unknown_namedtuple'
    for c in _follow_param(inference_state, arguments, 0):
        x = get_str_or_none(c)
        if x is not None:
            name = x
            break

    # TODO here we only use one of the types, we should use all.
    param_values = _follow_param(inference_state, arguments, 1)
    if not param_values:
        return NO_VALUES
    _fields = list(param_values)[0]
    string = get_str_or_none(_fields)
    if string is not None:
        fields = string.replace(',', ' ').split()
    elif isinstance(_fields, iterable.Sequence):
        fields = [
            get_str_or_none(v)
            for lazy_value in _fields.py__iter__()
            for v in lazy_value.infer()
        ]
        fields = [f for f in fields if f is not None]
    else:
        return NO_VALUES

    # Build source code
    code = _NAMEDTUPLE_CLASS_TEMPLATE.format(
        typename=name,
        field_names=tuple(fields),
        num_fields=len(fields),
        arg_list=repr(tuple(fields)).replace("'", "")[1:-1],
        repr_fmt='',
        field_defs='\n'.join(_NAMEDTUPLE_FIELD_TEMPLATE.format(index=index, name=name)
                             for index, name in enumerate(fields))
    )

    # Parse source code
    module = inference_state.grammar.parse(code)
    generated_class = next(module.iter_classdefs())
    parent_context = ModuleValue(
        inference_state, module,
        code_lines=parso.split_lines(code, keepends=True),
    ).as_context()

    return ValueSet([ClassValue(inference_state, parent_context, generated_class)])


class PartialObject(ValueWrapper):
    def __init__(self, actual_value, arguments, instance=None):
        super().__init__(actual_value)
        self._arguments = arguments
        self._instance = instance

    def _get_functions(self, unpacked_arguments):
        key, lazy_value = next(unpacked_arguments, (None, None))
        if key is not None or lazy_value is None:
            debug.warning("Partial should have a proper function %s", self._arguments)
            return None
        return lazy_value.infer()

    def get_signatures(self):
        unpacked_arguments = self._arguments.unpack()
        funcs = self._get_functions(unpacked_arguments)
        if funcs is None:
            return []

        arg_count = 0
        if self._instance is not None:
            arg_count = 1
        keys = set()
        for key, _ in unpacked_arguments:
            if key is None:
                arg_count += 1
            else:
                keys.add(key)
        return [PartialSignature(s, arg_count, keys) for s in funcs.get_signatures()]

    def py__call__(self, arguments):
        funcs = self._get_functions(self._arguments.unpack())
        if funcs is None:
            return NO_VALUES

        return funcs.execute(
            MergedPartialArguments(self._arguments, arguments, self._instance)
        )

    def py__doc__(self):
        """
        In CPython partial does not replace the docstring. However we are still
        imitating it here, because we want this docstring to be worth something
        for the user.
        """
        callables = self._get_functions(self._arguments.unpack())
        if callables is None:
            return ''
        for callable_ in callables:
            return callable_.py__doc__()
        return ''

    def py__get__(self, instance, class_value):
        return ValueSet([self])


class PartialMethodObject(PartialObject):
    def py__get__(self, instance, class_value):
        if instance is None:
            return ValueSet([self])
        return ValueSet([PartialObject(self._wrapped_value, self._arguments, instance)])


class PartialSignature(SignatureWrapper):
    def __init__(self, wrapped_signature, skipped_arg_count, skipped_arg_set):
        super().__init__(wrapped_signature)
        self._skipped_arg_count = skipped_arg_count
        self._skipped_arg_set = skipped_arg_set

    def get_param_names(self, resolve_stars=False):
        names = self._wrapped_signature.get_param_names()[self._skipped_arg_count:]
        return [n for n in names if n.string_name not in self._skipped_arg_set]


class MergedPartialArguments(AbstractArguments):
    def __init__(self, partial_arguments, call_arguments, instance=None):
        self._partial_arguments = partial_arguments
        self._call_arguments = call_arguments
        self._instance = instance

    def unpack(self, funcdef=None):
        unpacked = self._partial_arguments.unpack(funcdef)
        # Ignore this one, it's the function. It was checked before that it's
        # there.
        next(unpacked, None)
        if self._instance is not None:
            yield None, LazyKnownValue(self._instance)
        for key_lazy_value in unpacked:
            yield key_lazy_value
        for key_lazy_value in self._call_arguments.unpack(funcdef):
            yield key_lazy_value


def functools_partial(value, arguments, callback):
    return ValueSet(
        PartialObject(instance, arguments)
        for instance in value.py__call__(arguments)
    )


def functools_partialmethod(value, arguments, callback):
    return ValueSet(
        PartialMethodObject(instance, arguments)
        for instance in value.py__call__(arguments)
    )


@argument_clinic('first, /')
def _return_first_param(firsts):
    return firsts


@argument_clinic('seq')
def _random_choice(sequences):
    return ValueSet.from_sets(
        lazy_value.infer()
        for sequence in sequences
        for lazy_value in sequence.py__iter__()
    )


def _dataclass(value, arguments, callback):
    for c in _follow_param(value.inference_state, arguments, 0):
        if c.is_class():
            return ValueSet([DataclassWrapper(c)])
        else:
            return ValueSet([value])
    return NO_VALUES


class DataclassWrapper(ValueWrapper, ClassMixin):
    def get_signatures(self):
        param_names = []
        for cls in reversed(list(self.py__mro__())):
            if isinstance(cls, DataclassWrapper):
                filter_ = cls.as_context().get_global_filter()
                # .values ordering is not guaranteed, at least not in
                # Python < 3.6, when dicts where not ordered, which is an
                # implementation detail anyway.
                for name in sorted(filter_.values(), key=lambda name: name.start_pos):
                    d = name.tree_name.get_definition()
                    annassign = d.children[1]
                    if d.type == 'expr_stmt' and annassign.type == 'annassign':
                        if len(annassign.children) < 4:
                            default = None
                        else:
                            default = annassign.children[3]
                        param_names.append(DataclassParamName(
                            parent_context=cls.parent_context,
                            tree_name=name.tree_name,
                            annotation_node=annassign.children[1],
                            default_node=default,
                        ))
        return [DataclassSignature(cls, param_names)]


class DataclassSignature(AbstractSignature):
    def __init__(self, value, param_names):
        super().__init__(value)
        self._param_names = param_names

    def get_param_names(self, resolve_stars=False):
        return self._param_names


class DataclassParamName(BaseTreeParamName):
    def __init__(self, parent_context, tree_name, annotation_node, default_node):
        super().__init__(parent_context, tree_name)
        self.annotation_node = annotation_node
        self.default_node = default_node

    def get_kind(self):
        return Parameter.POSITIONAL_OR_KEYWORD

    def infer(self):
        if self.annotation_node is None:
            return NO_VALUES
        else:
            return self.parent_context.infer_node(self.annotation_node)


class ItemGetterCallable(ValueWrapper):
    def __init__(self, instance, args_value_set):
        super().__init__(instance)
        self._args_value_set = args_value_set

    @repack_with_argument_clinic('item, /')
    def py__call__(self, item_value_set):
        value_set = NO_VALUES
        for args_value in self._args_value_set:
            lazy_values = list(args_value.py__iter__())
            if len(lazy_values) == 1:
                # TODO we need to add the contextualized value.
                value_set |= item_value_set.get_item(lazy_values[0].infer(), None)
            else:
                value_set |= ValueSet([iterable.FakeList(
                    self._wrapped_value.inference_state,
                    [
                        LazyKnownValues(item_value_set.get_item(lazy_value.infer(), None))
                        for lazy_value in lazy_values
                    ],
                )])
        return value_set


@argument_clinic('func, /')
def _functools_wraps(funcs):
    return ValueSet(WrapsCallable(func) for func in funcs)


class WrapsCallable(ValueWrapper):
    # XXX this is not the correct wrapped value, it should be a weird
    # partials object, but it doesn't matter, because it's always used as a
    # decorator anyway.
    @repack_with_argument_clinic('func, /')
    def py__call__(self, funcs):
        return ValueSet({Wrapped(func, self._wrapped_value) for func in funcs})


class Wrapped(ValueWrapper, FunctionMixin):
    def __init__(self, func, original_function):
        super().__init__(func)
        self._original_function = original_function

    @property
    def name(self):
        return self._original_function.name

    def get_signature_functions(self):
        return [self]


@argument_clinic('*args, /', want_value=True, want_arguments=True)
def _operator_itemgetter(args_value_set, value, arguments):
    return ValueSet([
        ItemGetterCallable(instance, args_value_set)
        for instance in value.py__call__(arguments)
    ])


def _create_string_input_function(func):
    @argument_clinic('string, /', want_value=True, want_arguments=True)
    def wrapper(strings, value, arguments):
        def iterate():
            for value in strings:
                s = get_str_or_none(value)
                if s is not None:
                    s = func(s)
                    yield compiled.create_simple_object(value.inference_state, s)
        values = ValueSet(iterate())
        if values:
            return values
        return value.py__call__(arguments)
    return wrapper


@argument_clinic('*args, /', want_callback=True)
def _os_path_join(args_set, callback):
    if len(args_set) == 1:
        string = ''
        sequence, = args_set
        is_first = True
        for lazy_value in sequence.py__iter__():
            string_values = lazy_value.infer()
            if len(string_values) != 1:
                break
            s = get_str_or_none(next(iter(string_values)))
            if s is None:
                break
            if not is_first:
                string += os.path.sep
            string += s
            is_first = False
        else:
            return ValueSet([compiled.create_simple_object(sequence.inference_state, string)])
    return callback()


_implemented = {
    'builtins': {
        'getattr': builtins_getattr,
        'type': builtins_type,
        'super': builtins_super,
        'reversed': builtins_reversed,
        'isinstance': builtins_isinstance,
        'next': builtins_next,
        'iter': builtins_iter,
        'staticmethod': builtins_staticmethod,
        'classmethod': builtins_classmethod,
        'property': builtins_property,
    },
    'copy': {
        'copy': _return_first_param,
        'deepcopy': _return_first_param,
    },
    'json': {
        'load': lambda value, arguments, callback: NO_VALUES,
        'loads': lambda value, arguments, callback: NO_VALUES,
    },
    'collections': {
        'namedtuple': collections_namedtuple,
    },
    'functools': {
        'partial': functools_partial,
        'partialmethod': functools_partialmethod,
        'wraps': _functools_wraps,
    },
    '_weakref': {
        'proxy': _return_first_param,
    },
    'random': {
        'choice': _random_choice,
    },
    'operator': {
        'itemgetter': _operator_itemgetter,
    },
    'abc': {
        # Not sure if this is necessary, but it's used a lot in typeshed and
        # it's for now easier to just pass the function.
        'abstractmethod': _return_first_param,
    },
    'typing': {
        # The _alias function just leads to some annoying type inference.
        # Therefore, just make it return nothing, which leads to the stubs
        # being used instead. This only matters for 3.7+.
        '_alias': lambda value, arguments, callback: NO_VALUES,
        # runtime_checkable doesn't really change anything and is just
        # adding logs for infering stuff, so we can safely ignore it.
        'runtime_checkable': lambda value, arguments, callback: NO_VALUES,
    },
    'dataclasses': {
        # For now this works at least better than Jedi trying to understand it.
        'dataclass': _dataclass
    },
    # attrs exposes declaration interface roughly compatible with dataclasses
    # via attrs.define, attrs.frozen and attrs.mutable
    # https://www.attrs.org/en/stable/names.html
    'attr': {
        'define': _dataclass,
        'frozen': _dataclass,
    },
    'attrs': {
        'define': _dataclass,
        'frozen': _dataclass,
    },
    'os.path': {
        'dirname': _create_string_input_function(os.path.dirname),
        'abspath': _create_string_input_function(os.path.abspath),
        'relpath': _create_string_input_function(os.path.relpath),
        'join': _os_path_join,
    }
}


def get_metaclass_filters(func):
    def wrapper(cls, metaclasses, is_instance):
        for metaclass in metaclasses:
            if metaclass.py__name__() == 'EnumMeta' \
                    and metaclass.get_root_context().py__name__() == 'enum':
                filter_ = ParserTreeFilter(parent_context=cls.as_context())
                return [DictFilter({
                    name.string_name: EnumInstance(cls, name).name
                    for name in filter_.values()
                })]
        return func(cls, metaclasses, is_instance)
    return wrapper


class EnumInstance(LazyValueWrapper):
    def __init__(self, cls, name):
        self.inference_state = cls.inference_state
        self._cls = cls  # Corresponds to super().__self__
        self._name = name
        self.tree_node = self._name.tree_name

    @safe_property
    def name(self):
        return ValueName(self, self._name.tree_name)

    def _get_wrapped_value(self):
        n = self._name.string_name
        if n.startswith('__') and n.endswith('__') or self._name.api_type == 'function':
            inferred = self._name.infer()
            if inferred:
                return next(iter(inferred))
            o, = self.inference_state.builtins_module.py__getattribute__('object')
            return o

        value, = self._cls.execute_with_values()
        return value

    def get_filters(self, origin_scope=None):
        yield DictFilter(dict(
            name=compiled.create_simple_object(self.inference_state, self._name.string_name).name,
            value=self._name,
        ))
        for f in self._get_wrapped_value().get_filters():
            yield f


def tree_name_to_values(func):
    def wrapper(inference_state, context, tree_name):
        if tree_name.value == 'sep' and context.is_module() and context.py__name__() == 'os.path':
            return ValueSet({
                compiled.create_simple_object(inference_state, os.path.sep),
            })
        return func(inference_state, context, tree_name)
    return wrapper
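Taken together, these overrides are what make dynamic constructs such as collections.namedtuple completable. An end-to-end sketch, assuming the vendored jedi package is importable; the analyzed snippet and the expected completions are illustrative only:

import jedi

source = '''\
import collections
Point = collections.namedtuple('Point', 'x y')
p = Point(1, 2)
p.
'''
# Complete right after the dot on line 4; 'x' and 'y' should be among the results.
completions = jedi.Script(source).complete(line=4, column=2)
names = {c.name for c in completions}
print('x' in names and 'y' in names)  # expected to be True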