path: root/Cython/Compiler/Nodes.py
Diffstat (limited to 'Cython/Compiler/Nodes.py')
-rw-r--r--  Cython/Compiler/Nodes.py  301
1 file changed, 215 insertions(+), 86 deletions(-)
diff --git a/Cython/Compiler/Nodes.py b/Cython/Compiler/Nodes.py
index 4cad762ab..5c3321326 100644
--- a/Cython/Compiler/Nodes.py
+++ b/Cython/Compiler/Nodes.py
@@ -158,6 +158,7 @@ class Node(object):
is_terminator = 0
is_wrapper = False # is a DefNode wrapper for a C function
is_cproperty = False
+ is_templated_type_node = False
temps = None
# All descendants should set child_attrs to a list of the attributes
@@ -729,13 +730,15 @@ class CFuncDeclaratorNode(CDeclaratorNode):
# Use an explicit exception return value to speed up exception checks.
# Even if it is not declared, we can use the default exception value of the return type,
# unless the function is some kind of external function that we do not control.
- if (return_type.exception_value is not None and (visibility != 'extern' and not in_pxd)
- # Ideally the function-pointer test would be better after self.base is analysed
- # however that is hard to do with the current implementation so it lives here
- # for now
- and not isinstance(self.base, CPtrDeclaratorNode)):
- # Extension types are more difficult because the signature must match the base type signature.
- if not env.is_c_class_scope:
+ if (return_type.exception_value is not None and (visibility != 'extern' and not in_pxd)):
+ # - We skip this optimization for extension types; they are more difficult because
+ # the signature must match the base type signature.
+ # - Same for function pointers, as we want them to be able to match functions
+ # with any exception value.
+ # - Ideally, the function-pointer test would be done after self.base is analysed,
+ # but that is hard to do with the current implementation, so it lives here
+ # for now.
+ if not env.is_c_class_scope and not isinstance(self.base, CPtrDeclaratorNode):
from .ExprNodes import ConstNode
self.exception_value = ConstNode(
self.pos, value=return_type.exception_value, type=return_type)
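In condensed form, the reorganised guard above can be restated on its own as follows (a hypothetical standalone helper, not the compiler's API):

    def can_use_default_exception_value(has_default_exc_value, visibility, in_pxd,
                                        is_c_class_scope, declares_function_pointer):
        # Mirrors the condition above: only non-extern, non-pxd declarations that are
        # neither extension-type methods nor function pointers get the implicit value.
        return (has_default_exc_value
                and visibility != 'extern' and not in_pxd
                and not is_c_class_scope
                and not declares_function_pointer)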
@@ -966,27 +969,34 @@ class CArgDeclNode(Node):
annotation = self.annotation
if not annotation:
return None
- base_type, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default)
- if base_type is not None:
- self.base_type = base_type
-
- if arg_type and arg_type.python_type_constructor_name == "typing.Optional":
- # "x: Optional[...]" => explicitly allow 'None'
- arg_type = arg_type.resolve()
- if arg_type and not arg_type.is_pyobject:
- error(annotation.pos, "Only Python type arguments can use typing.Optional[...]")
- else:
- self.or_none = True
- elif arg_type is py_object_type:
- # exclude ": object" from the None check - None is a generic object.
- self.or_none = True
- elif arg_type and arg_type.is_pyobject and self.default and self.default.is_none:
- # "x: ... = None" => implicitly allow 'None', but warn about it.
- if not self.or_none:
- warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.")
+
+ modifiers, arg_type = annotation.analyse_type_annotation(env, assigned_value=self.default)
+ if arg_type is not None:
+ self.base_type = CAnalysedBaseTypeNode(
+ annotation.pos, type=arg_type, is_arg=True)
+
+ if arg_type:
+ if "typing.Optional" in modifiers:
+ # "x: Optional[...]" => explicitly allow 'None'
+ arg_type = arg_type.resolve()
+ if arg_type and not arg_type.is_pyobject:
+ # We probably already reported this as "cannot be applied to non-Python type".
+ # error(annotation.pos, "Only Python type arguments can use typing.Optional[...]")
+ pass
+ else:
+ self.or_none = True
+ elif arg_type is py_object_type:
+ # exclude ": object" from the None check - None is a generic object.
self.or_none = True
- elif arg_type and arg_type.is_pyobject and not self.or_none:
- self.not_none = True
+ elif self.default and self.default.is_none and (arg_type.is_pyobject or arg_type.equivalent_type):
+ # "x: ... = None" => implicitly allow 'None'
+ if not arg_type.is_pyobject:
+ arg_type = arg_type.equivalent_type
+ if not self.or_none:
+ warning(self.pos, "PEP-484 recommends 'typing.Optional[...]' for arguments that can be None.")
+ self.or_none = True
+ elif arg_type.is_pyobject and not self.or_none:
+ self.not_none = True
return arg_type
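As a rough illustration of the cases the rewritten branch distinguishes, using plain Python annotation syntax (the function itself is hypothetical):

    from typing import Optional

    def f(a: Optional[list], b: object, c: list, d: list = None):
        return a, b, c, d

    # a: Optional[...]  -> or_none: None is explicitly allowed
    # b: object         -> or_none: None is a generic object, so it is excluded from the check
    # c: list           -> not_none: None is rejected for typed Python arguments
    # d: list = None    -> or_none, with a PEP-484 warning recommending typing.Optional[...]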
@@ -1076,9 +1086,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
else:
type = py_object_type
else:
+ scope = env
if self.module_path:
# Maybe it's a nested C++ class.
- scope = env
for item in self.module_path:
entry = scope.lookup(item)
if entry is not None and (
@@ -1099,8 +1109,6 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
if scope is None:
# Maybe it's a cimport.
scope = env.find_imported_module(self.module_path, self.pos)
- else:
- scope = env
if scope:
if scope.is_c_class_scope:
@@ -1139,10 +1147,9 @@ class CSimpleBaseTypeNode(CBaseTypeNode):
type = PyrexTypes.c_double_complex_type
type.create_declaration_utility_code(env)
self.complex = True
- if type:
- return type
- else:
- return PyrexTypes.error_type
+ if not type:
+ type = PyrexTypes.error_type
+ return type
class MemoryViewSliceTypeNode(CBaseTypeNode):
@@ -1211,10 +1218,40 @@ class TemplatedTypeNode(CBaseTypeNode):
child_attrs = ["base_type_node", "positional_args",
"keyword_args", "dtype_node"]
+ is_templated_type_node = True
dtype_node = None
-
name = None
+ def _analyse_template_types(self, env, base_type):
+ require_python_types = base_type.python_type_constructor_name in (
+ 'typing.Optional',
+ 'dataclasses.ClassVar',
+ )
+ in_c_type_context = env.in_c_type_context and not require_python_types
+
+ template_types = []
+ for template_node in self.positional_args:
+ # CBaseTypeNode -> allow C type declarations in a 'cdef' context again
+ with env.new_c_type_context(in_c_type_context or isinstance(template_node, CBaseTypeNode)):
+ ttype = template_node.analyse_as_type(env)
+ if ttype is None:
+ if base_type.is_cpp_class:
+ error(template_node.pos, "unknown type in template argument")
+ ttype = error_type
+ # For Python generics we can be a bit more flexible and allow None.
+ elif require_python_types and not ttype.is_pyobject:
+ if ttype.equivalent_type and not template_node.as_cython_attribute():
+ ttype = ttype.equivalent_type
+ else:
+ error(template_node.pos, "%s[...] cannot be applied to non-Python type %s" % (
+ base_type.python_type_constructor_name,
+ ttype,
+ ))
+ ttype = error_type
+ template_types.append(ttype)
+
+ return template_types
+
def analyse(self, env, could_be_name=False, base_type=None):
if base_type is None:
base_type = self.base_type_node.analyse(env)
@@ -1222,21 +1259,15 @@ class TemplatedTypeNode(CBaseTypeNode):
if ((base_type.is_cpp_class and base_type.is_template_type()) or
base_type.python_type_constructor_name):
- # Templated class
+ # Templated class, Python generics, etc.
if self.keyword_args and self.keyword_args.key_value_pairs:
tp = "c++ templates" if base_type.is_cpp_class else "indexed types"
error(self.pos, "%s cannot take keyword arguments" % tp)
self.type = PyrexTypes.error_type
- else:
- template_types = []
- for template_node in self.positional_args:
- type = template_node.analyse_as_type(env)
- if type is None and base_type.is_cpp_class:
- error(template_node.pos, "unknown type in template argument")
- type = error_type
- # for indexed_pytype we can be a bit more flexible and pass None
- template_types.append(type)
- self.type = base_type.specialize_here(template_node.pos, env, template_types)
+ return self.type
+
+ template_types = self._analyse_template_types(env, base_type)
+ self.type = base_type.specialize_here(self.pos, env, template_types)
elif base_type.is_pyobject:
# Buffer
@@ -1277,7 +1308,7 @@ class TemplatedTypeNode(CBaseTypeNode):
dimension=dimension)
self.type = self.array_declarator.analyse(base_type, env)[1]
- if self.type.is_fused and env.fused_to_specific:
+ if self.type and self.type.is_fused and env.fused_to_specific:
try:
self.type = self.type.specialize(env.fused_to_specific)
except CannotSpecialize:
@@ -1287,6 +1318,19 @@ class TemplatedTypeNode(CBaseTypeNode):
return self.type
+ def analyse_pytyping_modifiers(self, env):
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ # TODO: somehow bring this together with IndexNode.analyse_pytyping_modifiers()
+ modifiers = []
+ modifier_node = self
+ while modifier_node.is_templated_type_node and modifier_node.base_type_node and len(modifier_node.positional_args) == 1:
+ modifier_type = modifier_node.base_type_node.analyse_as_type(env)
+ if modifier_type.python_type_constructor_name and modifier_type.modifier_name:
+ modifiers.append(modifier_type.modifier_name)
+ modifier_node = modifier_node.positional_args[0]
+
+ return modifiers
+
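A self-contained sketch of what the new analyse_pytyping_modifiers() walk does, using toy stand-ins rather than the real node and type classes:

    class ToyTypeNode:
        # Stands in for a TemplatedTypeNode wrapping exactly one argument,
        # e.g. dataclasses.InitVar[typing.Optional[int]].
        def __init__(self, modifier_name=None, inner=None):
            self.modifier_name = modifier_name  # None for a plain, unwrapped type
            self.inner = inner

    def collect_modifiers(node):
        modifiers = []
        while node is not None and node.modifier_name is not None:
            modifiers.append(node.modifier_name)
            node = node.inner
        return modifiers

    # x: dataclasses.InitVar[typing.Optional[int]]
    node = ToyTypeNode("dataclasses.InitVar", ToyTypeNode("typing.Optional", ToyTypeNode()))
    assert collect_modifiers(node) == ["dataclasses.InitVar", "typing.Optional"]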
class CComplexBaseTypeNode(CBaseTypeNode):
# base_type CBaseTypeNode
@@ -1414,6 +1458,11 @@ class CVarDefNode(StatNode):
base_type = self.base_type.analyse(env)
+ # Check for declaration modifiers, e.g. "typing.Optional[...]" or "dataclasses.InitVar[...]"
+ modifiers = None
+ if self.base_type.is_templated_type_node:
+ modifiers = self.base_type.analyse_pytyping_modifiers(env)
+
if base_type.is_fused and not self.in_pxd and (env.is_c_class_scope or
env.is_module_scope):
error(self.pos, "Fused types not allowed here")
@@ -1477,7 +1526,7 @@ class CVarDefNode(StatNode):
self.entry = dest_scope.declare_var(
name, type, declarator.pos,
cname=cname, visibility=visibility, in_pxd=self.in_pxd,
- api=self.api, is_cdef=1)
+ api=self.api, is_cdef=True, pytyping_modifiers=modifiers)
if Options.docstrings:
self.entry.doc = embed_position(self.pos, self.doc)
@@ -1586,6 +1635,9 @@ class CppClassNode(CStructOrUnionDefNode, BlockNode):
elif isinstance(attr, CompilerDirectivesNode):
for sub_attr in func_attributes(attr.body.stats):
yield sub_attr
+ elif isinstance(attr, CppClassNode) and attr.attributes is not None:
+ for sub_attr in func_attributes(attr.attributes):
+ yield sub_attr
if self.attributes is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
@@ -2070,7 +2122,6 @@ class FuncDefNode(StatNode, BlockNode):
self.generate_argument_parsing_code(env, code)
# If an argument is assigned to in the body, we must
# incref it to properly keep track of refcounts.
- is_cdef = isinstance(self, CFuncDefNode)
for entry in lenv.arg_entries:
if not entry.type.is_memoryviewslice:
if (acquire_gil or entry.cf_is_reassigned) and not entry.in_closure:
@@ -2079,7 +2130,7 @@ class FuncDefNode(StatNode, BlockNode):
# we acquire arguments from object conversion, so we have
# new references. If we are a cdef function, we need to
# incref our arguments
- elif is_cdef and entry.cf_is_reassigned:
+ elif entry.cf_is_reassigned and not entry.in_closure:
code.put_var_incref_memoryviewslice(entry,
have_gil=code.funcstate.gil_owned)
for entry in lenv.var_entries:
@@ -2184,7 +2235,14 @@ class FuncDefNode(StatNode, BlockNode):
# code.put_trace_exception()
assure_gil('error')
+ if code.funcstate.error_without_exception:
+ tempvardecl_code.putln(
+ "int %s = 0; /* StopIteration */" % Naming.error_without_exception_cname
+ )
+ code.putln("if (!%s) {" % Naming.error_without_exception_cname)
code.put_add_traceback(self.entry.qualified_name)
+ if code.funcstate.error_without_exception:
+ code.putln("}")
else:
warning(self.entry.pos,
"Unraisable exception in function '%s'." %
@@ -2274,14 +2332,14 @@ class FuncDefNode(StatNode, BlockNode):
# Decref any increfed args
for entry in lenv.arg_entries:
+ if entry.in_closure:
+ continue
if entry.type.is_memoryviewslice:
# decref slices of def functions and acquired slices from cdef
# functions, but not borrowed slices from cdef functions.
- if is_cdef and not entry.cf_is_reassigned:
+ if not entry.cf_is_reassigned:
continue
else:
- if entry.in_closure:
- continue
if not acquire_gil and not entry.cf_is_reassigned:
continue
if entry.type.needs_refcounting:
@@ -2827,8 +2885,11 @@ class CFuncDefNode(FuncDefNode):
def put_into_closure(entry):
if entry.in_closure and not arg.default:
code.putln('%s = %s;' % (entry.cname, entry.original_cname))
- code.put_var_incref(entry)
- code.put_var_giveref(entry)
+ if entry.type.is_memoryviewslice:
+ entry.type.generate_incref_memoryviewslice(code, entry.cname, True)
+ else:
+ code.put_var_incref(entry)
+ code.put_var_giveref(entry)
for arg in self.args:
put_into_closure(scope.lookup_here(arg.name))
@@ -3164,7 +3225,7 @@ class DefNode(FuncDefNode):
else:
# probably just a plain 'object'
arg.accept_none = True
- else:
+ elif not arg.type.is_error:
arg.accept_none = True # won't be used, but must be there
if arg.not_none:
error(arg.pos, "Only Python type arguments can have 'not None'")
@@ -3457,8 +3518,20 @@ class DefNode(FuncDefNode):
# Move arguments into closure if required
def put_into_closure(entry):
if entry.in_closure:
- code.putln('%s = %s;' % (entry.cname, entry.original_cname))
- if entry.xdecref_cleanup:
+ if entry.type.is_array:
+ # This applies to generator expressions that iterate over C arrays (and need to
+ # capture them by value); under most other circumstances, C array arguments are
+ # dropped to pointers, so this copy isn't used.
+ assert entry.type.size is not None
+ code.globalstate.use_utility_code(UtilityCode.load_cached("IncludeStringH", "StringTools.c"))
+ code.putln("memcpy({0}, {1}, sizeof({0}));".format(entry.cname, entry.original_cname))
+ else:
+ code.putln('%s = %s;' % (entry.cname, entry.original_cname))
+ if entry.type.is_memoryviewslice:
+ # TODO - at some point the reference counting of memoryviews should
+ # genuinely be unified with that of PyObjects
+ entry.type.generate_incref_memoryviewslice(code, entry.cname, True)
+ elif entry.xdecref_cleanup:
# mostly applies to the starstar arg - this can sometimes be NULL
# so must be xincrefed instead
code.put_var_xincref(entry)
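For context, the new array branch mainly serves generator expressions that close over a C array argument and must copy it by value. A hypothetical helper (not the compiler's API; the cnames are illustrative) showing the C statement that gets emitted:

    def closure_copy_stmt(entry_cname, original_cname, is_array):
        # Arrays are copied with memcpy (sized by the destination); everything else
        # uses a plain C assignment, as in the branch above.
        if is_array:
            return "memcpy({0}, {1}, sizeof({0}));".format(entry_cname, original_cname)
        return "%s = %s;" % (entry_cname, original_cname)

    assert closure_copy_stmt("__pyx_cur_scope->values", "values", True) == \
        "memcpy(__pyx_cur_scope->values, values, sizeof(__pyx_cur_scope->values));"
    assert closure_copy_stmt("__pyx_cur_scope->obj", "obj", False) == "__pyx_cur_scope->obj = obj;"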
@@ -3616,11 +3689,20 @@ class DefNodeWrapper(FuncDefNode):
# ----- Non-error return cleanup
code.put_label(code.return_label)
for entry in lenv.var_entries:
- if entry.is_arg and entry.type.is_pyobject:
+ if entry.is_arg:
+ # mainly captures the star/starstar args
if entry.xdecref_cleanup:
code.put_var_xdecref(entry)
else:
code.put_var_decref(entry)
+ for arg in self.args:
+ if not arg.type.is_pyobject:
+ # This captures anything that's been converted from a PyObject.
+ # Primarily memoryviews at the moment
+ if arg.entry.xdecref_cleanup:
+ code.put_var_xdecref(arg.entry)
+ else:
+ code.put_var_decref(arg.entry)
code.put_finish_refcount_context()
if not self.return_type.is_void:
@@ -3673,7 +3755,7 @@ class DefNodeWrapper(FuncDefNode):
with_pymethdef = False
dc = self.return_type.declaration_code(entry.func_cname)
- header = "static %s%s(%s)" % (mf, dc, arg_code)
+ header = "%sstatic %s(%s)" % (mf, dc, arg_code)
code.putln("%s; /*proto*/" % header)
if proto_only:
@@ -5100,7 +5182,6 @@ class CClassDefNode(ClassDefNode):
check_size = None
decorators = None
shadow = False
- is_dataclass = False
@property
def punycode_class_name(self):
@@ -5142,6 +5223,8 @@ class CClassDefNode(ClassDefNode):
api=self.api,
buffer_defaults=self.buffer_defaults(env),
shadow=self.shadow)
+ if self.bases and len(self.bases.args) > 1:
+ self.entry.type.multiple_bases = True
def analyse_declarations(self, env):
#print "CClassDefNode.analyse_declarations:", self.class_name
@@ -5150,8 +5233,6 @@ class CClassDefNode(ClassDefNode):
if env.in_cinclude and not self.objstruct_name:
error(self.pos, "Object struct name specification required for C class defined in 'extern from' block")
- if "dataclasses.dataclass" in env.directives:
- self.is_dataclass = True
if self.decorators:
error(self.pos, "Decorators not allowed on cdef classes (used on type '%s')" % self.class_name)
self.base_type = None
@@ -5188,7 +5269,8 @@ class CClassDefNode(ClassDefNode):
error(base.pos, "Base class '%s' of type '%s' is final" % (
base_type, self.class_name))
elif base_type.is_builtin_type and \
- base_type.name in ('tuple', 'str', 'bytes'):
+ base_type.name in ('tuple', 'bytes'):
+ # 'str' in Py2 also falls into this category, but is now checked at run-time
error(base.pos, "inheritance from PyVarObject types like '%s' is not currently supported"
% base_type.name)
else:
@@ -5232,6 +5314,8 @@ class CClassDefNode(ClassDefNode):
api=self.api,
buffer_defaults=self.buffer_defaults(env),
shadow=self.shadow)
+ if self.bases and len(self.bases.args) > 1:
+ self.entry.type.multiple_bases = True
if self.shadow:
home_scope.lookup(self.class_name).as_variable = self.entry
@@ -5240,6 +5324,15 @@ class CClassDefNode(ClassDefNode):
self.scope = scope = self.entry.type.scope
if scope is not None:
scope.directives = env.directives
+ if "dataclasses.dataclass" in env.directives:
+ is_frozen = False
+ # Retrieve the @dataclass config (args, kwargs), as passed into the decorator.
+ dataclass_config = env.directives["dataclasses.dataclass"]
+ if dataclass_config:
+ decorator_kwargs = dataclass_config[1]
+ frozen_flag = decorator_kwargs.get('frozen')
+ is_frozen = frozen_flag and frozen_flag.is_literal and frozen_flag.value
+ scope.is_c_dataclass_scope = "frozen" if is_frozen else True
if self.doc and Options.docstrings:
scope.doc = embed_position(self.pos, self.doc)
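A minimal, standalone restatement of the frozen-flag handling above (the Const namedtuple stands in for the compile-time constant node; this is not the compiler's real API):

    from collections import namedtuple

    Const = namedtuple("Const", ["is_literal", "value"])

    def dataclass_scope_value(dataclass_config):
        # dataclass_config is the (args, kwargs) pair stored for @dataclasses.dataclass.
        is_frozen = False
        if dataclass_config:
            decorator_kwargs = dataclass_config[1]
            frozen_flag = decorator_kwargs.get("frozen")
            is_frozen = bool(frozen_flag and frozen_flag.is_literal and frozen_flag.value)
        return "frozen" if is_frozen else True

    assert dataclass_scope_value(((), {"frozen": Const(True, True)})) == "frozen"
    assert dataclass_scope_value(((), {})) is True
    assert dataclass_scope_value(None) is True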
@@ -5435,8 +5528,10 @@ class CClassDefNode(ClassDefNode):
typeptr_cname, buffer_slot.slot_name,
))
code.putln("}")
+ code.putln("#elif defined(_MSC_VER)")
+ code.putln("#pragma message (\"The buffer protocol is not supported in the Limited C-API.\")")
code.putln("#else")
- code.putln("#warning The buffer protocol is not supported in the Limited C-API.")
+ code.putln("#warning \"The buffer protocol is not supported in the Limited C-API.\"")
code.putln("#endif")
code.globalstate.use_utility_code(
@@ -5455,6 +5550,22 @@ class CClassDefNode(ClassDefNode):
))
code.putln("#endif") # if CYTHON_USE_TYPE_SPECS
+ base_type = type.base_type
+ while base_type:
+ if base_type.is_external and not base_type.objstruct_cname == "PyTypeObject":
+ # 'type' is special-cased because it is actually based on PyHeapTypeObject
+ # Variable length bases are allowed if the current class doesn't grow
+ code.putln("if (sizeof(%s%s) != sizeof(%s%s)) {" % (
+ "" if type.typedef_flag else "struct ", type.objstruct_cname,
+ "" if base_type.typedef_flag else "struct ", base_type.objstruct_cname))
+ code.globalstate.use_utility_code(
+ UtilityCode.load_cached("ValidateExternBase", "ExtensionTypes.c"))
+ code.put_error_if_neg(entry.pos, "__Pyx_validate_extern_base(%s)" % (
+ type.base_type.typeptr_cname))
+ code.putln("}")
+ break
+ base_type = base_type.base_type
+
code.putln("#if !CYTHON_COMPILING_IN_LIMITED_API")
# FIXME: these still need to get initialised even with the limited-API
for slot in TypeSlots.get_slot_table(code.globalstate.directives):
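Conceptually, the loop added above walks the base-type chain, size-checks only the first external base that is not plain PyTypeObject, and stops there. A hypothetical restatement of just that walk (attribute names mirror the ones used above, but this is not the compiler's API):

    def first_extern_base_to_validate(tp):
        base = tp.base_type
        while base is not None:
            if base.is_external and base.objstruct_cname != "PyTypeObject":
                # only this base triggers the sizeof() comparison and
                # the __Pyx_validate_extern_base() runtime check
                return base
            base = base.base_type
        return None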
@@ -6654,11 +6765,15 @@ class RaiseStatNode(StatNode):
# exc_value ExprNode or None
# exc_tb ExprNode or None
# cause ExprNode or None
+ #
+ # set in FlowControl
+ # in_try_block bool
child_attrs = ["exc_type", "exc_value", "exc_tb", "cause"]
is_terminator = True
builtin_exc_name = None
wrap_tuple_value = False
+ in_try_block = False
def analyse_expressions(self, env):
if self.exc_type:
@@ -6687,9 +6802,19 @@ class RaiseStatNode(StatNode):
not (exc.args or (exc.arg_tuple is not None and exc.arg_tuple.args))):
exc = exc.function # extract the exception type
if exc.is_name and exc.entry.is_builtin:
+ from . import Symtab
self.builtin_exc_name = exc.name
if self.builtin_exc_name == 'MemoryError':
self.exc_type = None # has a separate implementation
+ elif (self.builtin_exc_name == 'StopIteration' and
+ env.is_local_scope and env.name == "__next__" and
+ env.parent_scope and env.parent_scope.is_c_class_scope and
+ not self.in_try_block):
+ # tp_iternext is allowed to return NULL without raising StopIteration.
+ # For the sake of simplicity, only allow this to happen when not in
+ # a try block
+ self.exc_type = None
+
return self
nogil_check = Node.gil_error
@@ -6700,6 +6825,11 @@ class RaiseStatNode(StatNode):
if self.builtin_exc_name == 'MemoryError':
code.putln('PyErr_NoMemory(); %s' % code.error_goto(self.pos))
return
+ elif self.builtin_exc_name == 'StopIteration' and not self.exc_type:
+ code.putln('%s = 1;' % Naming.error_without_exception_cname)
+ code.putln('%s;' % code.error_goto(None))
+ code.funcstate.error_without_exception = True
+ return
if self.exc_type:
self.exc_type.generate_evaluation_code(code)
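Taken together with the error_without_exception flag added in FuncDefNode above, this lets a bare 'raise StopIteration' in an extension type's __next__ compile down to a plain NULL return from tp_iternext, with no exception object and no traceback. A user-level illustration in ordinary Python syntax (in Cython this would typically be a cdef class):

    class CountDown:
        def __init__(self, n):
            self.n = n
        def __iter__(self):
            return self
        def __next__(self):
            if self.n <= 0:
                # not inside a try block, so the compiled version may simply return NULL
                raise StopIteration
            self.n -= 1
            return self.n

    assert list(CountDown(3)) == [2, 1, 0]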
@@ -8610,7 +8740,7 @@ class FromCImportStatNode(StatNode):
#
# module_name string Qualified name of module
# relative_level int or None Relative import: number of dots before module_name
- # imported_names [(pos, name, as_name, kind)] Names to be imported
+ # imported_names [(pos, name, as_name)] Names to be imported
child_attrs = []
module_name = None
@@ -8621,35 +8751,34 @@ class FromCImportStatNode(StatNode):
if not env.is_module_scope:
error(self.pos, "cimport only allowed at module level")
return
- if self.relative_level and self.relative_level > env.qualified_name.count('.'):
- error(self.pos, "relative cimport beyond main package is not allowed")
- return
+ qualified_name_components = env.qualified_name.count('.') + 1
+ if self.relative_level:
+ if self.relative_level > qualified_name_components:
+ # case 1: importing beyond the package: from .. import pkg
+ error(self.pos, "relative cimport beyond main package is not allowed")
+ return
+ elif self.relative_level == qualified_name_components and not env.is_package:
+ # case 2: importing from the same level, but the current directory is not a package: from . import module
+ error(self.pos, "relative cimport from non-package directory is not allowed")
+ return
module_scope = env.find_module(self.module_name, self.pos, relative_level=self.relative_level)
module_name = module_scope.qualified_name
env.add_imported_module(module_scope)
- for pos, name, as_name, kind in self.imported_names:
+ for pos, name, as_name in self.imported_names:
if name == "*":
for local_name, entry in list(module_scope.entries.items()):
env.add_imported_entry(local_name, entry, pos)
else:
entry = module_scope.lookup(name)
if entry:
- if kind and not self.declaration_matches(entry, kind):
- entry.redeclared(pos)
entry.used = 1
else:
- if kind == 'struct' or kind == 'union':
- entry = module_scope.declare_struct_or_union(
- name, kind=kind, scope=None, typedef_flag=0, pos=pos)
- elif kind == 'class':
- entry = module_scope.declare_c_class(name, pos=pos, module_name=module_name)
+ submodule_scope = env.context.find_module(
+ name, relative_to=module_scope, pos=self.pos, absolute_fallback=False)
+ if submodule_scope.parent_module is module_scope:
+ env.declare_module(as_name or name, submodule_scope, self.pos)
else:
- submodule_scope = env.context.find_module(
- name, relative_to=module_scope, pos=self.pos, absolute_fallback=False)
- if submodule_scope.parent_module is module_scope:
- env.declare_module(as_name or name, submodule_scope, self.pos)
- else:
- error(pos, "Name '%s' not declared in module '%s'" % (name, module_name))
+ error(pos, "Name '%s' not declared in module '%s'" % (name, module_name))
if entry:
local_name = as_name or name
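The new relative-level check can be restated on its own roughly as follows (hypothetical helper, not the compiler's API):

    def check_relative_cimport(qualified_name, relative_level, is_package):
        components = qualified_name.count('.') + 1
        if relative_level > components:
            return "relative cimport beyond main package is not allowed"
        if relative_level == components and not is_package:
            return "relative cimport from non-package directory is not allowed"
        return None  # allowed

    # 'from .. cimport x' in a top-level module 'mod':
    assert check_relative_cimport("mod", 2, is_package=False).startswith("relative cimport beyond")
    # 'from . cimport x' in a top-level non-package module 'mod':
    assert check_relative_cimport("mod", 1, is_package=False).startswith("relative cimport from non-package")
    # 'from . cimport x' in the package 'pkg' itself (its __init__):
    assert check_relative_cimport("pkg", 1, is_package=True) is None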
@@ -8658,7 +8787,7 @@ class FromCImportStatNode(StatNode):
if module_name.startswith('cpython') or module_name.startswith('cython'): # enough for now
if module_name in utility_code_for_cimports:
env.use_utility_code(utility_code_for_cimports[module_name]())
- for _, name, _, _ in self.imported_names:
+ for _, name, _ in self.imported_names:
fqname = '%s.%s' % (module_name, name)
if fqname in utility_code_for_cimports:
env.use_utility_code(utility_code_for_cimports[fqname]())
@@ -10028,13 +10157,13 @@ class CnameDecoratorNode(StatNode):
class ErrorNode(Node):
"""
- Node type for things that we want to get throught the parser
+ Node type for things that we want to get through the parser
(especially for things that are being scanned in "tentative_scan"
blocks), but should immediately raise an error afterwards.
what str
"""
- pass
+ child_attrs = []
#------------------------------------------------------------------------------------