#
# Pyrex - Parse tree nodes
#
import cython
from cython import set
cython.declare(sys=object, os=object, time=object, copy=object,
Builtin=object, error=object, warning=object, Naming=object, PyrexTypes=object,
py_object_type=object, ModuleScope=object, LocalScope=object, ClosureScope=object, \
StructOrUnionScope=object, PyClassScope=object, CClassScope=object,
CppClassScope=object, UtilityCode=object, EncodedString=object,
absolute_path_length=cython.Py_ssize_t)
import sys, os, time, copy
import Builtin
from Errors import error, warning, InternalError
import Naming
import PyrexTypes
import TypeSlots
from PyrexTypes import py_object_type, error_type, CFuncType
from Symtab import ModuleScope, LocalScope, ClosureScope, \
StructOrUnionScope, PyClassScope, CClassScope, CppClassScope
from Cython.Utils import open_new_file, replace_suffix
from Code import UtilityCode, ClosureTempAllocator
from StringEncoding import EncodedString, escape_byte_string, split_string_literal
import Options
import DebugFlags
absolute_path_length = 0
def relative_position(pos):
"""
We embed the relative filename in the generated C file, since we
don't want to have to regenerate and compile all the source code
whenever the Python install directory moves (which could happen,
e.g., when distributing binaries).
INPUT:
a position tuple -- (absolute filename, line number, column position)
OUTPUT:
relative filename
line number
AUTHOR: William Stein
"""
global absolute_path_length
if absolute_path_length==0:
absolute_path_length = len(os.path.abspath(os.getcwd()))
return (pos[0].get_filenametable_entry()[absolute_path_length+1:], pos[1])
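# Illustrative sketch (not part of the original file): assuming the compiler
# runs with the project root as its working directory, a position whose
# source descriptor resolves to '<cwd>/pkg/mod.pyx' at line 12 yields a path
# relative to that root:
#
#   # pos[0].get_filenametable_entry() == '/home/user/project/pkg/mod.pyx'
#   # os.getcwd()                      == '/home/user/project'
#   relative_position(pos)             # -> ('pkg/mod.pyx', 12)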
def embed_position(pos, docstring):
if not Options.embed_pos_in_docstring:
return docstring
pos_line = u'File: %s (starting at line %s)' % relative_position(pos)
if docstring is None:
# unicode string
return EncodedString(pos_line)
# make sure we can encode the filename in the docstring encoding
# otherwise make the docstring a unicode string
encoding = docstring.encoding
if encoding is not None:
try:
encoded_bytes = pos_line.encode(encoding)
except UnicodeEncodeError:
encoding = None
if not docstring:
# reuse the string encoding of the original docstring
doc = EncodedString(pos_line)
else:
doc = EncodedString(pos_line + u'\n' + docstring)
doc.encoding = encoding
return doc
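# Illustrative sketch (not part of the original file): with
# Options.embed_pos_in_docstring enabled, a function starting at line 3 of
# pkg/mod.pyx with docstring u"Frobnicate." would roughly get
#
#   u'File: pkg/mod.pyx (starting at line 3)\nFrobnicate.'
#
# while a function without a docstring gets just the 'File: ...' line; the
# original docstring encoding is kept unless the filename cannot be encoded
# in it.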
from Code import CCodeWriter
from types import FunctionType
def write_func_call(func):
def f(*args, **kwds):
if len(args) > 1 and isinstance(args[1], CCodeWriter):
# here we annotate the code with this function call
# but only if new code is generated
node, code = args[:2]
marker = ' /* %s -> %s.%s %s */' % (
' ' * code.call_level,
node.__class__.__name__,
func.__name__,
node.pos[1:])
pristine = code.buffer.stream.tell()
code.putln(marker)
start = code.buffer.stream.tell()
code.call_level += 4
res = func(*args, **kwds)
code.call_level -= 4
if start == code.buffer.stream.tell():
code.buffer.stream.seek(pristine)
else:
marker = marker.replace('->', '<-')
code.putln(marker)
return res
else:
return func(*args, **kwds)
return f
class VerboseCodeWriter(type):
# Set this as a metaclass to trace function calls in code.
# This slows down code generation and makes the generated files much larger.
def __new__(cls, name, bases, attrs):
attrs = dict(attrs)
for mname, m in attrs.items():
if isinstance(m, FunctionType):
attrs[mname] = write_func_call(m)
return super(VerboseCodeWriter, cls).__new__(cls, name, bases, attrs)
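# Illustrative sketch (not part of the original file): with
# DebugFlags.debug_trace_code_generation enabled, Node (below) picks up this
# metaclass, so every code-generating method that actually emits something is
# bracketed in the generated C file with markers of the form
#
#   /*  -> CFuncDefNode.generate_function_definitions (12, 0) */
#   ... emitted C code ...
#   /*  <- CFuncDefNode.generate_function_definitions (12, 0) */
#
# Calls that emit nothing are rewound via the buffer's stream position, so
# only productive calls leave a trace.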
class Node(object):
# pos (string, int, int) Source file position
# is_name boolean Is a NameNode
# is_literal boolean Is a ConstNode
if DebugFlags.debug_trace_code_generation:
__metaclass__ = VerboseCodeWriter
is_name = 0
is_literal = 0
is_terminator = 0
temps = None
# All descendants should set child_attrs to a list of the attributes
# containing nodes considered "children" in the tree. Each such attribute
# can either contain a single node or a list of nodes. See Visitor.py.
child_attrs = None
cf_state = None
def __init__(self, pos, **kw):
self.pos = pos
self.__dict__.update(kw)
gil_message = "Operation"
nogil_check = None
def gil_error(self, env=None):
error(self.pos, "%s not allowed without gil" % self.gil_message)
cpp_message = "Operation"
def cpp_check(self, env):
if not env.is_cpp():
self.cpp_error()
def cpp_error(self):
error(self.pos, "%s only allowed in c++" % self.cpp_message)
def clone_node(self):
"""Clone the node. This is defined as a shallow copy, except for member lists
amongst the child attributes (from get_child_accessors) which are also
copied. Lists containing child nodes are thus seen as a way for the node
to hold multiple children directly; the list is not treated as a separate
level in the tree."""
result = copy.copy(self)
for attrname in result.child_attrs:
value = getattr(result, attrname)
if isinstance(value, list):
setattr(result, attrname, [x for x in value])
return result
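# Illustrative sketch (not part of the original file): cloning a node with a
# list-valued child attribute (e.g. StatListNode.stats) copies the list itself
# but not the child nodes, so the clone's list can be modified independently
# while the children stay shared:
#
#   clone = stat_list_node.clone_node()
#   assert clone.stats is not stat_list_node.stats    # fresh list
#   assert clone.stats[0] is stat_list_node.stats[0]  # shared child node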
#
# There are 3 phases of parse tree processing, applied in order to
# all the statements in a given scope-block:
#
# (0) analyse_declarations
# Make symbol table entries for all declarations at the current
# level, both explicit (def, cdef, etc.) and implicit (assignment
# to an otherwise undeclared name).
#
# (1) analyse_expressions
# Determine the result types of expressions and fill in the
# 'type' attribute of each ExprNode. Insert coercion nodes into the
# tree where needed to convert to and from Python objects.
# Allocate temporary locals for intermediate results. Fill
# in the 'result_code' attribute of each ExprNode with a C code
# fragment.
#
# (2) generate_code
# Emit C code for all declarations, statements and expressions.
# Recursively applies the 3 processing phases to the bodies of
# functions.
#
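# Illustrative sketch (not part of the original file): a driver processing a
# scope block applies the phases in order over all of its statements, roughly:
#
#   for stat in block.stats:
#       stat.analyse_declarations(env)
#   for stat in block.stats:
#       stat.analyse_expressions(env)
#   for stat in block.stats:
#       stat.generate_code(code)
#
# StatListNode below performs exactly this kind of looping over its child
# statements.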
def analyse_declarations(self, env):
pass
def analyse_expressions(self, env):
raise InternalError("analyse_expressions not implemented for %s" % \
self.__class__.__name__)
def generate_code(self, code):
raise InternalError("generate_code not implemented for %s" % \
self.__class__.__name__)
def annotate(self, code):
# mro does the wrong thing
if isinstance(self, BlockNode):
self.body.annotate(code)
def end_pos(self):
try:
return self._end_pos
except AttributeError:
pos = self.pos
if not self.child_attrs:
self._end_pos = pos
return pos
for attr in self.child_attrs:
child = getattr(self, attr)
# Sometimes lists, sometimes nodes
if child is None:
pass
elif isinstance(child, list):
for c in child:
pos = max(pos, c.end_pos())
else:
pos = max(pos, child.end_pos())
self._end_pos = pos
return pos
def dump(self, level=0, filter_out=("pos",), cutoff=100, encountered=None):
if cutoff == 0:
return "<...nesting level cutoff...>"
if encountered is None:
encountered = set()
if id(self) in encountered:
return "<%s (0x%x) -- already output>" % (self.__class__.__name__, id(self))
encountered.add(id(self))
def dump_child(x, level):
if isinstance(x, Node):
return x.dump(level, filter_out, cutoff-1, encountered)
elif isinstance(x, list):
return "[%s]" % ", ".join([dump_child(item, level) for item in x])
else:
return repr(x)
attrs = [(key, value) for key, value in self.__dict__.items() if key not in filter_out]
if len(attrs) == 0:
return "<%s (0x%x)>" % (self.__class__.__name__, id(self))
else:
indent = " " * level
res = "<%s (0x%x)\n" % (self.__class__.__name__, id(self))
for key, value in attrs:
res += "%s %s: %s\n" % (indent, key, dump_child(value, level + 1))
res += "%s>" % indent
return res
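# Illustrative sketch (not part of the original file): Node.dump() renders a
# node and its attributes recursively, skipping 'pos', guarding against
# cycles, and stopping at the nesting cutoff, producing output roughly like:
#
#   print node.dump()
#   # <StatListNode (0x...)
#   #   stats: [<PassStatNode (0x...)>]
#   # >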
class CompilerDirectivesNode(Node):
"""
Sets compiler directives for the children nodes
"""
# directives {string:value} A dictionary holding the right value for
# *all* possible directives.
# body Node
child_attrs = ["body"]
def analyse_declarations(self, env):
old = env.directives
env.directives = self.directives
self.body.analyse_declarations(env)
env.directives = old
def analyse_expressions(self, env):
old = env.directives
env.directives = self.directives
self.body.analyse_expressions(env)
env.directives = old
def generate_function_definitions(self, env, code):
env_old = env.directives
code_old = code.globalstate.directives
code.globalstate.directives = self.directives
self.body.generate_function_definitions(env, code)
env.directives = env_old
code.globalstate.directives = code_old
def generate_execution_code(self, code):
old = code.globalstate.directives
code.globalstate.directives = self.directives
self.body.generate_execution_code(code)
code.globalstate.directives = old
def annotate(self, code):
old = code.globalstate.directives
code.globalstate.directives = self.directives
self.body.annotate(code)
code.globalstate.directives = old
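# Illustrative sketch (not part of the original file): 'directives' holds the
# fully resolved value for every directive in this subtree, e.g. a mapping
# along the lines of {'boundscheck': True, 'wraparound': True,
# 'cdivision': False, ...}; each method above temporarily installs it on the
# environment or the global code state while the body is processed, then
# restores the previous mapping.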
class BlockNode(object):
# Mixin class for nodes representing a declaration block.
def generate_cached_builtins_decls(self, env, code):
entries = env.global_scope().undeclared_cached_builtins
for entry in entries:
code.globalstate.add_cached_builtin_decl(entry)
del entries[:]
def generate_lambda_definitions(self, env, code):
for node in env.lambda_defs:
node.generate_function_definitions(env, code)
class StatListNode(Node):
# stats a list of StatNode
child_attrs = ["stats"]
def create_analysed(pos, env, *args, **kw):
node = StatListNode(pos, *args, **kw)
return node # No node-specific analysis necessary
create_analysed = staticmethod(create_analysed)
def analyse_declarations(self, env):
#print "StatListNode.analyse_declarations" ###
for stat in self.stats:
stat.analyse_declarations(env)
def analyse_expressions(self, env):
#print "StatListNode.analyse_expressions" ###
for stat in self.stats:
stat.analyse_expressions(env)
def generate_function_definitions(self, env, code):
#print "StatListNode.generate_function_definitions" ###
for stat in self.stats:
stat.generate_function_definitions(env, code)
def generate_execution_code(self, code):
#print "StatListNode.generate_execution_code" ###
for stat in self.stats:
code.mark_pos(stat.pos)
stat.generate_execution_code(code)
def annotate(self, code):
for stat in self.stats:
stat.annotate(code)
class StatNode(Node):
#
# Code generation for statements is split into the following subphases:
#
# (1) generate_function_definitions
# Emit C code for the definitions of any structs,
# unions, enums and functions defined in the current
# scope-block.
#
# (2) generate_execution_code
# Emit C code for executable statements.
#
def generate_function_definitions(self, env, code):
pass
def generate_execution_code(self, code):
raise InternalError("generate_execution_code not implemented for %s" % \
self.__class__.__name__)
class CDefExternNode(StatNode):
# include_file string or None
# body StatNode
child_attrs = ["body"]
def analyse_declarations(self, env):
if self.include_file:
env.add_include_file(self.include_file)
old_cinclude_flag = env.in_cinclude
env.in_cinclude = 1
self.body.analyse_declarations(env)
env.in_cinclude = old_cinclude_flag
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
self.body.generate_execution_code(code)
def annotate(self, code):
self.body.annotate(code)
class CDeclaratorNode(Node):
# Part of a C declaration.
#
# Processing during analyse_declarations phase:
#
# analyse
# Returns (name, type) pair where name is the
# CNameDeclaratorNode of the name being declared
# and type is the type it is being declared as.
#
# calling_convention string Calling convention of CFuncDeclaratorNode
# for which this is a base
child_attrs = []
calling_convention = ""
class CNameDeclaratorNode(CDeclaratorNode):
# name string The Pyrex name being declared
# cname string or None C name, if specified
# default ExprNode or None the value assigned on declaration
child_attrs = ['default']
default = None
def analyse(self, base_type, env, nonempty = 0):
if nonempty and self.name == '':
# May have mistaken the name for the type.
if base_type.is_ptr or base_type.is_array or base_type.is_buffer:
error(self.pos, "Missing argument name")
elif base_type.is_void:
error(self.pos, "Use spam() rather than spam(void) to declare a function with no arguments.")
else:
self.name = base_type.declaration_code("", for_display=1, pyrex=1)
base_type = py_object_type
self.type = base_type
return self, base_type
class CPtrDeclaratorNode(CDeclaratorNode):
# base CDeclaratorNode
child_attrs = ["base"]
def analyse(self, base_type, env, nonempty = 0):
if base_type.is_pyobject:
error(self.pos,
"Pointer base type cannot be a Python object")
ptr_type = PyrexTypes.c_ptr_type(base_type)
return self.base.analyse(ptr_type, env, nonempty = nonempty)
class CReferenceDeclaratorNode(CDeclaratorNode):
# base CDeclaratorNode
child_attrs = ["base"]
def analyse(self, base_type, env, nonempty = 0):
if base_type.is_pyobject:
error(self.pos,
"Reference base type cannot be a Python object")
ref_type = PyrexTypes.c_ref_type(base_type)
return self.base.analyse(ref_type, env, nonempty = nonempty)
class CArrayDeclaratorNode(CDeclaratorNode):
# base CDeclaratorNode
# dimension ExprNode
child_attrs = ["base", "dimension"]
def analyse(self, base_type, env, nonempty = 0):
if base_type.is_cpp_class:
from ExprNodes import TupleNode
if isinstance(self.dimension, TupleNode):
args = self.dimension.args
else:
args = self.dimension,
values = [v.analyse_as_type(env) for v in args]
if None in values:
ix = values.index(None)
error(args[ix].pos, "Template parameter not a type.")
return error_type
base_type = base_type.specialize_here(self.pos, values)
return self.base.analyse(base_type, env, nonempty = nonempty)
if self.dimension:
self.dimension.analyse_const_expression(env)
if not self.dimension.type.is_int:
error(self.dimension.pos, "Array dimension not integer")
size = self.dimension.get_constant_c_result_code()
if size is not None:
try:
size = int(size)
except ValueError:
# runtime constant?
pass
else:
size = None
if not base_type.is_complete():
error(self.pos,
"Array element type '%s' is incomplete" % base_type)
if base_type.is_pyobject:
error(self.pos,
"Array element cannot be a Python object")
if base_type.is_cfunction:
error(self.pos,
"Array element cannot be a function")
array_type = PyrexTypes.c_array_type(base_type, size)
return self.base.analyse(array_type, env, nonempty = nonempty)
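# Illustrative sketch (not part of the original file): for a declaration such
# as 'cdef int a[10]', analyse() above is entered with base_type = c_int_type
# and an integer constant as the dimension; it wraps the element type in
# PyrexTypes.c_array_type(c_int_type, 10) and delegates to the base declarator,
# which supplies the name, so the final (name declarator, type) result pairs
# 'a' with an int[10] array type.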
class CFuncDeclaratorNode(CDeclaratorNode):
# base CDeclaratorNode
# args [CArgDeclNode]
# has_varargs boolean
# exception_value ConstNode
# exception_check boolean True if PyErr_Occurred check needed
# nogil boolean Can be called without gil
# with_gil boolean Acquire gil around function body
child_attrs = ["base", "args", "exception_value"]
overridable = 0
optional_arg_count = 0
def analyse(self, return_type, env, nonempty = 0, directive_locals = {}):
if nonempty:
nonempty -= 1
func_type_args = []
for i, arg_node in enumerate(self.args):
name_declarator, type = arg_node.analyse(env, nonempty = nonempty,
is_self_arg = (i == 0 and env.is_c_class_scope))
name = name_declarator.name
if name in directive_locals:
type_node = directive_locals[name]
other_type = type_node.analyse_as_type(env)
if other_type is None:
error(type_node.pos, "Not a type")
elif (type is not PyrexTypes.py_object_type
and not type.same_as(other_type)):
error(self.base.pos, "Signature does not agree with previous declaration")
error(type_node.pos, "Previous declaration here")
else:
type = other_type
if name_declarator.cname:
error(self.pos,
"Function argument cannot have C name specification")
if i==0 and env.is_c_class_scope and type.is_unspecified:
# fix the type of self
type = env.parent_type
# Turn *[] argument into **
if type.is_array:
type = PyrexTypes.c_ptr_type(type.base_type)
# Catch attempted C-style func(void) decl
if type.is_void:
error(arg_node.pos, "Use spam() rather than spam(void) to declare a function with no arguments.")
func_type_args.append(
PyrexTypes.CFuncTypeArg(name, type, arg_node.pos))
if arg_node.default:
self.optional_arg_count += 1
elif self.optional_arg_count:
error(self.pos, "Non-default argument follows default argument")
if self.optional_arg_count:
scope = StructOrUnionScope()
arg_count_member = '%sn' % Naming.pyrex_prefix
scope.declare_var(arg_count_member, PyrexTypes.c_int_type, self.pos)
for arg in func_type_args[len(func_type_args)-self.optional_arg_count:]:
scope.declare_var(arg.name, arg.type, arg.pos, allow_pyobject = 1)
struct_cname = env.mangle(Naming.opt_arg_prefix, self.base.name)
self.op_args_struct = env.global_scope().declare_struct_or_union(name = struct_cname,
kind = 'struct',
scope = scope,
typedef_flag = 0,
pos = self.pos,
cname = struct_cname)
self.op_args_struct.defined_in_pxd = 1
self.op_args_struct.used = 1
exc_val = None
exc_check = 0
if self.exception_check == '+':
env.add_include_file('ios') # for std::ios_base::failure
env.add_include_file('new') # for std::bad_alloc
env.add_include_file('stdexcept')
env.add_include_file('typeinfo') # for std::bad_cast
if return_type.is_pyobject \
and (self.exception_value or self.exception_check) \
and self.exception_check != '+':
error(self.pos,
"Exception clause not allowed for function returning Python object")
else:
if self.exception_value:
self.exception_value.analyse_const_expression(env)
if self.exception_check == '+':
self.exception_value.analyse_types(env)
exc_val_type = self.exception_value.type
if not exc_val_type.is_error and \
not exc_val_type.is_pyobject and \
not (exc_val_type.is_cfunction and not exc_val_type.return_type.is_pyobject and len(exc_val_type.args)==0):
error(self.exception_value.pos,
"Exception value must be a Python exception or cdef function with no arguments.")
exc_val = self.exception_value
else:
self.exception_value = self.exception_value.coerce_to(return_type, env)
if self.exception_value.analyse_const_expression(env):
exc_val = self.exception_value.get_constant_c_result_code()
if exc_val is None:
raise InternalError("get_constant_c_result_code not implemented for %s" %
self.exception_value.__class__.__name__)
if not return_type.assignable_from(self.exception_value.type):
error(self.exception_value.pos,
"Exception value incompatible with function return type")
exc_check = self.exception_check
if return_type.is_cfunction:
error(self.pos,
"Function cannot return a function")
func_type = PyrexTypes.CFuncType(
return_type, func_type_args, self.has_varargs,
optional_arg_count = self.optional_arg_count,
exception_value = exc_val, exception_check = exc_check,
calling_convention = self.base.calling_convention,
nogil = self.nogil, with_gil = self.with_gil, is_overridable = self.overridable)
if self.optional_arg_count:
func_type.op_arg_struct = PyrexTypes.c_ptr_type(self.op_args_struct.type)
callspec = env.directives['callspec']
if callspec:
current = func_type.calling_convention
if current and current != callspec:
error(self.pos, "cannot have both '%s' and '%s' "
"calling conventions" % (current, callspec))
func_type.calling_convention = callspec
return self.base.analyse(func_type, env)
class CArgDeclNode(Node):
# Item in a function declaration argument list.
#
# base_type CBaseTypeNode
# declarator CDeclaratorNode
# not_none boolean Tagged with 'not None'
# or_none boolean Tagged with 'or None'
# accept_none boolean Resolved boolean for not_none/or_none
# default ExprNode or None
# default_value PyObjectConst constant for default value
# annotation ExprNode or None Py3 function arg annotation
# is_self_arg boolean Is the "self" arg of an extension type method
# is_type_arg boolean Is the "class" arg of an extension type classmethod
# is_kw_only boolean Is a keyword-only argument
child_attrs = ["base_type", "declarator", "default"]
is_self_arg = 0
is_type_arg = 0
is_generic = 1
kw_only = 0
not_none = 0
or_none = 0
type = None
name_declarator = None
default_value = None
annotation = None
def analyse(self, env, nonempty = 0, is_self_arg = False):
if is_self_arg:
self.base_type.is_self_arg = self.is_self_arg = True
if self.type is None:
# The parser may misinterpret names as types...
# We fix that here.
if isinstance(self.declarator, CNameDeclaratorNode) and self.declarator.name == '':
if nonempty:
self.declarator.name = self.base_type.name
self.base_type.name = None
self.base_type.is_basic_c_type = False
could_be_name = True
else:
could_be_name = False
base_type = self.base_type.analyse(env, could_be_name = could_be_name)
if hasattr(self.base_type, 'arg_name') and self.base_type.arg_name:
self.declarator.name = self.base_type.arg_name
# The parser is unable to resolve the ambiguity of [] as part of the
# type (e.g. in buffers) or empty declarator (as with arrays).
# This only arises for empty multi-dimensional arrays.
if (base_type.is_array
and isinstance(self.base_type, TemplatedTypeNode)
and isinstance(self.declarator, CArrayDeclaratorNode)):
declarator = self.declarator
while isinstance(declarator.base, CArrayDeclaratorNode):
declarator = declarator.base
declarator.base = self.base_type.array_declarator
base_type = base_type.base_type
return self.declarator.analyse(base_type, env, nonempty = nonempty)
else:
return self.name_declarator, self.type
def calculate_default_value_code(self, code):
if self.default_value is None:
if self.default:
if self.default.is_literal:
# will not output any code, just assign the result_code
self.default.generate_evaluation_code(code)
return self.type.cast_code(self.default.result())
self.default_value = code.get_argument_default_const(self.type)
return self.default_value
def annotate(self, code):
if self.default:
self.default.annotate(code)
class CBaseTypeNode(Node):
# Abstract base class for C base type nodes.
#
# Processing during analyse_declarations phase:
#
# analyse
# Returns the type.
pass
def analyse_as_type(self, env):
return self.analyse(env)
class CAnalysedBaseTypeNode(Node):
# type type
child_attrs = []
def analyse(self, env, could_be_name = False):
return self.type
class CSimpleBaseTypeNode(CBaseTypeNode):
# name string
# module_path [string] Qualifying name components
# is_basic_c_type boolean
# signed boolean
# longness integer
# complex boolean
# is_self_arg boolean Is self argument of C method
# ##is_type_arg boolean Is type argument of class method
child_attrs = []
arg_name = None # in case the argument name was interpreted as a type
module_path = []
is_basic_c_type = False
complex = False
def analyse(self, env, could_be_name = False):
# Return type descriptor.
#print "CSimpleBaseTypeNode.analyse: is_self_arg =", self.is_self_arg ###
type = None
if self.is_basic_c_type:
type = PyrexTypes.simple_c_type(self.signed, self.longness, self.name)
if not type:
error(self.pos, "Unrecognised type modifier combination")
elif self.name == "object" and not self.module_path:
type = py_object_type
elif self.name is None:
if self.is_self_arg and env.is_c_class_scope:
#print "CSimpleBaseTypeNode.analyse: defaulting to parent type" ###
type = env.parent_type
## elif self.is_type_arg and env.is_c_class_scope:
## type = Builtin.type_type
else:
type = py_object_type
else:
if self.module_path:
scope = env.find_imported_module(self.module_path, self.pos)
else:
scope = env
if scope:
if scope.is_c_class_scope:
scope = scope.global_scope()
entry = scope.lookup(self.name)
if entry and entry.is_type:
type = entry.type
elif could_be_name:
if self.is_self_arg and env.is_c_class_scope:
type = env.parent_type
## elif self.is_type_arg and env.is_c_class_scope:
## type = Builtin.type_type
else:
type = py_object_type
self.arg_name = self.name
else:
if self.templates:
if not self.name in self.templates:
error(self.pos, "'%s' is not a type identifier" % self.name)
type = PyrexTypes.TemplatePlaceholderType(self.name)
else:
error(self.pos, "'%s' is not a type identifier" % self.name)
if self.complex:
if not type.is_numeric or type.is_complex:
error(self.pos, "can only complexify c numeric types")
type = PyrexTypes.CComplexType(type)
type.create_declaration_utility_code(env)
elif type is Builtin.complex_type:
# Special case: optimise builtin complex type into C's
# double complex. The parser cannot do this (as for the
# normal scalar types) as the user may have redeclared the
# 'complex' type. Testing for the exact type here works.
type = PyrexTypes.c_double_complex_type
type.create_declaration_utility_code(env)
self.complex = True
if type:
return type
else:
return PyrexTypes.error_type
class CNestedBaseTypeNode(CBaseTypeNode):
# For C++ classes that live inside other C++ classes.
# name string
# base_type CBaseTypeNode
child_attrs = ['base_type']
def analyse(self, env, could_be_name = None):
base_type = self.base_type.analyse(env)
if base_type is PyrexTypes.error_type:
return PyrexTypes.error_type
if not base_type.is_cpp_class:
error(self.pos, "'%s' is not a valid type scope" % base_type)
return PyrexTypes.error_type
type_entry = base_type.scope.lookup_here(self.name)
if not type_entry or not type_entry.is_type:
error(self.pos, "'%s.%s' is not a type identifier" % (base_type, self.name))
return PyrexTypes.error_type
return type_entry.type
class TemplatedTypeNode(CBaseTypeNode):
# After parsing:
# positional_args [ExprNode] List of positional arguments
# keyword_args DictNode Keyword arguments
# base_type_node CBaseTypeNode
# After analysis:
# type PyrexTypes.BufferType or PyrexTypes.CppClassType ...containing the right options
child_attrs = ["base_type_node", "positional_args",
"keyword_args", "dtype_node"]
dtype_node = None
name = None
def analyse(self, env, could_be_name = False, base_type = None):
if base_type is None:
base_type = self.base_type_node.analyse(env)
if base_type.is_error: return base_type
if base_type.is_cpp_class:
# Templated class
if self.keyword_args and self.keyword_args.key_value_pairs:
error(self.pos, "c++ templates cannot take keyword arguments");
self.type = PyrexTypes.error_type
else:
template_types = []
for template_node in self.positional_args:
type = template_node.analyse_as_type(env)
if type is None:
error(template_node.pos, "unknown type in template argument")
return error_type
template_types.append(type)
self.type = base_type.specialize_here(self.pos, template_types)
elif base_type.is_pyobject:
# Buffer
import Buffer
options = Buffer.analyse_buffer_options(
self.pos,
env,
self.positional_args,
self.keyword_args,
base_type.buffer_defaults)
if sys.version_info[0] < 3:
# Py 2.x enforces byte strings as keyword arguments ...
options = dict([ (name.encode('ASCII'), value)
for name, value in options.items() ])
self.type = PyrexTypes.BufferType(base_type, **options)
else:
# Array
empty_declarator = CNameDeclaratorNode(self.pos, name="", cname=None)
if len(self.positional_args) > 1 or self.keyword_args.key_value_pairs:
error(self.pos, "invalid array declaration")
self.type = PyrexTypes.error_type
else:
# It would be nice to merge this class with CArrayDeclaratorNode,
# but arrays are part of the declaration, not the type...
if not self.positional_args:
dimension = None
else:
dimension = self.positional_args[0]
self.array_declarator = CArrayDeclaratorNode(self.pos,
base = empty_declarator,
dimension = dimension)
self.type = self.array_declarator.analyse(base_type, env)[1]
return self.type
class CComplexBaseTypeNode(CBaseTypeNode):
# base_type CBaseTypeNode
# declarator CDeclaratorNode
child_attrs = ["base_type", "declarator"]
def analyse(self, env, could_be_name = False):
base = self.base_type.analyse(env, could_be_name)
_, type = self.declarator.analyse(base, env)
return type
class CVarDefNode(StatNode):
# C variable definition or forward/extern function declaration.
#
# visibility 'private' or 'public' or 'extern'
# base_type CBaseTypeNode
# declarators [CDeclaratorNode]
# in_pxd boolean
# api boolean
# decorators [cython.locals(...)] or None
# directive_locals { string : NameNode } locals defined by cython.locals(...)
child_attrs = ["base_type", "declarators"]
decorators = None
directive_locals = None
def analyse_declarations(self, env, dest_scope = None):
if self.directive_locals is None:
self.directive_locals = {}
if not dest_scope:
dest_scope = env
self.dest_scope = dest_scope
base_type = self.base_type.analyse(env)
visibility = self.visibility
for declarator in self.declarators:
if isinstance(declarator, CFuncDeclaratorNode):
name_declarator, type = declarator.analyse(base_type, env, directive_locals=self.directive_locals)
else:
name_declarator, type = declarator.analyse(base_type, env)
if not type.is_complete():
if not (self.visibility == 'extern' and type.is_array):
error(declarator.pos,
"Variable type '%s' is incomplete" % type)
if self.visibility == 'extern' and type.is_pyobject:
error(declarator.pos,
"Python object cannot be declared extern")
name = name_declarator.name
cname = name_declarator.cname
if name == '':
error(declarator.pos, "Missing name in declaration.")
return
if type.is_cfunction:
entry = dest_scope.declare_cfunction(name, type, declarator.pos,
cname = cname, visibility = self.visibility,
in_pxd = self.in_pxd, api = self.api)
if entry is not None:
entry.directive_locals = copy.copy(self.directive_locals)
else:
if self.directive_locals:
error(self.pos, "Decorators can only be followed by functions")
entry = dest_scope.declare_var(name, type, declarator.pos,
cname = cname, visibility = visibility,
in_pxd = self.in_pxd, api = self.api, is_cdef = 1)
class CStructOrUnionDefNode(StatNode):
# name string
# cname string or None
# kind "struct" or "union"
# typedef_flag boolean
# visibility "public" or "private"
# api boolean
# in_pxd boolean
# attributes [CVarDefNode] or None
# entry Entry
# packed boolean
child_attrs = ["attributes"]
def declare(self, env, scope=None):
if self.visibility == 'extern' and self.packed and not scope:
error(self.pos, "Cannot declare extern struct as 'packed'")
self.entry = env.declare_struct_or_union(
self.name, self.kind, scope, self.typedef_flag, self.pos,
self.cname, visibility = self.visibility, api = self.api,
packed = self.packed)
def analyse_declarations(self, env):
scope = None
if self.attributes is not None:
scope = StructOrUnionScope(self.name)
self.declare(env, scope)
if self.attributes is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
for attr in self.attributes:
attr.analyse_declarations(env, scope)
if self.visibility != 'extern':
for attr in scope.var_entries:
type = attr.type
while type.is_array:
type = type.base_type
if type == self.entry.type:
error(attr.pos, "Struct cannot contain itself as a member.")
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
class CppClassNode(CStructOrUnionDefNode):
# name string
# cname string or None
# visibility "extern"
# in_pxd boolean
# attributes [CVarDefNode] or None
# entry Entry
# base_classes [string]
# templates [string] or None
def declare(self, env):
if self.templates is None:
template_types = None
else:
template_types = [PyrexTypes.TemplatePlaceholderType(template_name) for template_name in self.templates]
self.entry = env.declare_cpp_class(
self.name, None, self.pos,
self.cname, base_classes = [], visibility = self.visibility, templates = template_types)
def analyse_declarations(self, env):
scope = None
if self.attributes is not None:
scope = CppClassScope(self.name, env)
base_class_types = []
for base_class_name in self.base_classes:
base_class_entry = env.lookup(base_class_name)
if base_class_entry is None:
error(self.pos, "'%s' not found" % base_class_name)
elif not base_class_entry.is_type or not base_class_entry.type.is_cpp_class:
error(self.pos, "'%s' is not a cpp class type" % base_class_name)
else:
base_class_types.append(base_class_entry.type)
if self.templates is None:
template_types = None
else:
template_types = [PyrexTypes.TemplatePlaceholderType(template_name) for template_name in self.templates]
self.entry = env.declare_cpp_class(
self.name, scope, self.pos,
self.cname, base_class_types, visibility = self.visibility, templates = template_types)
if self.entry is None:
return
self.entry.is_cpp_class = 1
if self.attributes is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
for attr in self.attributes:
attr.analyse_declarations(scope)
class CEnumDefNode(StatNode):
# name string or None
# cname string or None
# items [CEnumDefItemNode]
# typedef_flag boolean
# visibility "public" or "private"
# api boolean
# in_pxd boolean
# entry Entry
child_attrs = ["items"]
def declare(self, env):
self.entry = env.declare_enum(self.name, self.pos,
cname = self.cname, typedef_flag = self.typedef_flag,
visibility = self.visibility, api = self.api)
def analyse_declarations(self, env):
if self.items is not None:
if self.in_pxd and not env.in_cinclude:
self.entry.defined_in_pxd = 1
for item in self.items:
item.analyse_declarations(env, self.entry)
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
if self.visibility == 'public' or self.api or self.is_overridable:
temp = code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=True)
for item in self.entry.enum_values:
code.putln("%s = PyInt_FromLong(%s); %s" % (
temp,
item.cname,
code.error_goto_if_null(temp, item.pos)))
code.put_gotref(temp)
code.putln('if (__Pyx_SetAttrString(%s, "%s", %s) < 0) %s' % (
Naming.module_cname,
item.name,
temp,
code.error_goto(item.pos)))
code.put_decref_clear(temp, PyrexTypes.py_object_type)
code.funcstate.release_temp(temp)
class CEnumDefItemNode(StatNode):
# name string
# cname string or None
# value ExprNode or None
child_attrs = ["value"]
def analyse_declarations(self, env, enum_entry):
if self.value:
self.value.analyse_const_expression(env)
if not self.value.type.is_int:
self.value = self.value.coerce_to(PyrexTypes.c_int_type, env)
self.value.analyse_const_expression(env)
entry = env.declare_const(self.name, enum_entry.type,
self.value, self.pos, cname = self.cname,
visibility = enum_entry.visibility, api = enum_entry.api)
enum_entry.enum_values.append(entry)
class CTypeDefNode(StatNode):
# base_type CBaseTypeNode
# declarator CDeclaratorNode
# visibility "public" or "private"
# api boolean
# in_pxd boolean
child_attrs = ["base_type", "declarator"]
def analyse_declarations(self, env):
base = self.base_type.analyse(env)
name_declarator, type = self.declarator.analyse(base, env)
name = name_declarator.name
cname = name_declarator.cname
entry = env.declare_typedef(name, type, self.pos,
cname = cname, visibility = self.visibility, api = self.api)
if self.in_pxd and not env.in_cinclude:
entry.defined_in_pxd = 1
def analyse_expressions(self, env):
pass
def generate_execution_code(self, code):
pass
class FuncDefNode(StatNode, BlockNode):
# Base class for function definition nodes.
#
# return_type PyrexType
# #filename string C name of filename string const
# entry Symtab.Entry
# needs_closure boolean Whether or not this function has inner functions/classes/yield
# needs_outer_scope boolean Whether or not this function requires outer scope
# directive_locals { string : NameNode } locals defined by cython.locals(...)
# star_arg PyArgDeclNode or None * argument
# starstar_arg PyArgDeclNode or None ** argument
py_func = None
assmt = None
needs_closure = False
needs_outer_scope = False
is_generator = False
is_generator_body = False
modifiers = []
star_arg = None
starstar_arg = None
def analyse_default_values(self, env):
genv = env.global_scope()
default_seen = 0
for arg in self.args:
if arg.default:
default_seen = 1
if arg.is_generic:
arg.default.analyse_types(env)
arg.default = arg.default.coerce_to(arg.type, genv)
else:
error(arg.pos,
"This argument cannot have a default value")
arg.default = None
elif arg.kw_only:
default_seen = 1
elif default_seen:
error(arg.pos, "Non-default argument following default argument")
def align_argument_type(self, env, arg):
directive_locals = self.directive_locals
type = arg.type
if arg.name in directive_locals:
type_node = directive_locals[arg.name]
other_type = type_node.analyse_as_type(env)
if other_type is None:
error(type_node.pos, "Not a type")
elif (type is not PyrexTypes.py_object_type
and not type.same_as(other_type)):
error(arg.base_type.pos, "Signature does not agree with previous declaration")
error(type_node.pos, "Previous declaration here")
else:
arg.type = other_type
return arg
def need_gil_acquisition(self, lenv):
return 0
def create_local_scope(self, env):
genv = env
while genv.is_py_class_scope or genv.is_c_class_scope:
genv = genv.outer_scope
if self.needs_closure:
lenv = ClosureScope(name=self.entry.name,
outer_scope = genv,
scope_name=self.entry.cname)
else:
lenv = LocalScope(name=self.entry.name,
outer_scope=genv,
parent_scope=env)
lenv.return_type = self.return_type
type = self.entry.type
if type.is_cfunction:
lenv.nogil = type.nogil and not type.with_gil
self.local_scope = lenv
lenv.directives = env.directives
return lenv
def generate_function_body(self, env, code):
self.body.generate_execution_code(code)
def generate_function_definitions(self, env, code):
import Buffer
lenv = self.local_scope
if lenv.is_closure_scope and not lenv.is_passthrough:
outer_scope_cname = "%s->%s" % (Naming.cur_scope_cname,
Naming.outer_scope_cname)
else:
outer_scope_cname = Naming.outer_scope_cname
lenv.mangle_closure_cnames(outer_scope_cname)
# Generate closure function definitions
self.body.generate_function_definitions(lenv, code)
# generate lambda function definitions
self.generate_lambda_definitions(lenv, code)
is_getbuffer_slot = (self.entry.name == "__getbuffer__" and
self.entry.scope.is_c_class_scope)
is_releasebuffer_slot = (self.entry.name == "__releasebuffer__" and
self.entry.scope.is_c_class_scope)
is_buffer_slot = is_getbuffer_slot or is_releasebuffer_slot
if is_buffer_slot:
if 'cython_unused' not in self.modifiers:
self.modifiers = self.modifiers + ['cython_unused']
preprocessor_guard = None
if self.entry.is_special and not is_buffer_slot:
slot = TypeSlots.method_name_to_slot.get(self.entry.name)
if slot:
preprocessor_guard = slot.preprocessor_guard_code()
if (self.entry.name == '__long__' and
not self.entry.scope.lookup_here('__int__')):
preprocessor_guard = None
profile = code.globalstate.directives['profile']
if profile and lenv.nogil:
warning(self.pos, "Cannot profile nogil function.", 1)
profile = False
if profile:
code.globalstate.use_utility_code(profile_utility_code)
# Generate C code for header and body of function
code.enter_cfunc_scope()
code.return_from_error_cleanup_label = code.new_label()
# ----- Top-level constants used by this function
code.mark_pos(self.pos)
self.generate_cached_builtins_decls(lenv, code)
# ----- Function header
code.putln("")
if preprocessor_guard:
code.putln(preprocessor_guard)
with_pymethdef = self.needs_assignment_synthesis(env, code)
if self.py_func:
self.py_func.generate_function_header(code,
with_pymethdef = with_pymethdef,
proto_only=True)
self.generate_function_header(code,
with_pymethdef = with_pymethdef)
# ----- Local variable declarations
# Find function scope
cenv = env
while cenv.is_py_class_scope or cenv.is_c_class_scope:
cenv = cenv.outer_scope
if self.needs_closure:
code.put(lenv.scope_class.type.declaration_code(Naming.cur_scope_cname))
code.putln(";")
elif self.needs_outer_scope:
if lenv.is_passthrough:
code.put(lenv.scope_class.type.declaration_code(Naming.cur_scope_cname))
code.putln(";")
code.put(cenv.scope_class.type.declaration_code(Naming.outer_scope_cname))
code.putln(";")
self.generate_argument_declarations(lenv, code)
for entry in lenv.var_entries:
if not entry.in_closure:
code.put_var_declaration(entry)
init = ""
if not self.return_type.is_void:
if self.return_type.is_pyobject:
init = " = NULL"
code.putln(
"%s%s;" %
(self.return_type.declaration_code(Naming.retval_cname),
init))
tempvardecl_code = code.insertion_point()
self.generate_keyword_list(code)
if profile:
code.put_trace_declarations()
# ----- Extern library function declarations
lenv.generate_library_function_declarations(code)
# ----- GIL acquisition
acquire_gil = self.acquire_gil
# See if we need to acquire the GIL for the variable declarations and
# refnanny setup only, i.e. a nogil function containing a 'with gil' block.
acquire_gil_for_var_decls_only = (lenv.nogil and
lenv.has_with_gil_block)
use_refnanny = not lenv.nogil or acquire_gil_for_var_decls_only
if acquire_gil or acquire_gil_for_var_decls_only:
code.put_ensure_gil()
# ----- set up refnanny
if use_refnanny:
tempvardecl_code.put_declare_refcount_context()
code.put_setup_refcount_context(self.entry.name)
# ----- Automatic lead-ins for certain special functions
if is_getbuffer_slot:
self.getbuffer_init(code)
# ----- Create closure scope object
if self.needs_closure:
code.putln("%s = (%s)%s->tp_new(%s, %s, NULL);" % (
Naming.cur_scope_cname,
lenv.scope_class.type.declaration_code(''),
lenv.scope_class.type.typeptr_cname,
lenv.scope_class.type.typeptr_cname,
Naming.empty_tuple))
code.putln("if (unlikely(!%s)) {" % Naming.cur_scope_cname)
if is_getbuffer_slot:
self.getbuffer_error_cleanup(code)
if use_refnanny:
code.put_finish_refcount_context()
if acquire_gil_for_var_decls_only:
code.put_release_ensured_gil()
# FIXME: what if the error return value is a Python value?
code.putln("return %s;" % self.error_value())
code.putln("}")
code.put_gotref(Naming.cur_scope_cname)
# Note that it is unsafe to decref the scope at this point.
if self.needs_outer_scope:
code.putln("%s = (%s)%s;" % (
outer_scope_cname,
cenv.scope_class.type.declaration_code(''),
Naming.self_cname))
if lenv.is_passthrough:
code.putln("%s = %s;" % (Naming.cur_scope_cname, outer_scope_cname));
elif self.needs_closure:
# inner closures own a reference to their outer parent
code.put_incref(outer_scope_cname, cenv.scope_class.type)
code.put_giveref(outer_scope_cname)
# ----- Trace function call
if profile:
# this looks a bit late, but if we don't get here due to a
# fatal error beforehand, it's not really worth tracing
code.put_trace_call(self.entry.name, self.pos)
# ----- Fetch arguments
self.generate_argument_parsing_code(env, code)
# If an argument is assigned to in the body, we must
# incref it to properly keep track of refcounts.
for entry in lenv.arg_entries:
if entry.type.is_pyobject:
if (acquire_gil or entry.assignments) and not entry.in_closure:
code.put_var_incref(entry)
# ----- Initialise local buffer auxiliary variables
for entry in lenv.var_entries + lenv.arg_entries:
if entry.type.is_buffer and entry.buffer_aux.buffer_info_var.used:
code.putln("%s.buf = NULL;" %
entry.buffer_aux.buffer_info_var.cname)
# ----- Check and convert arguments
self.generate_argument_type_tests(code)
# ----- Acquire buffer arguments
for entry in lenv.arg_entries:
if entry.type.is_buffer:
Buffer.put_acquire_arg_buffer(entry, code, self.pos)
if acquire_gil_for_var_decls_only:
code.put_release_ensured_gil()
# -------------------------
# ----- Function body -----
# -------------------------
self.generate_function_body(env, code)
# ----- Default return value
code.putln("")
if self.return_type.is_pyobject:
#if self.return_type.is_extension_type:
# lhs = "(PyObject *)%s" % Naming.retval_cname
#else:
lhs = Naming.retval_cname
code.put_init_to_py_none(lhs, self.return_type)
else:
val = self.return_type.default_value
if val:
code.putln("%s = %s;" % (Naming.retval_cname, val))
# ----- Error cleanup
if code.error_label in code.labels_used:
code.put_goto(code.return_label)
code.put_label(code.error_label)
for cname, type in code.funcstate.all_managed_temps():
code.put_xdecref(cname, type)
# Clean up buffers -- this calls a Python function
# so we need to save and restore the error state
buffers_present = len(lenv.buffer_entries) > 0
if buffers_present:
code.globalstate.use_utility_code(restore_exception_utility_code)
code.putln("{ PyObject *__pyx_type, *__pyx_value, *__pyx_tb;")
code.putln("__Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);")
for entry in lenv.buffer_entries:
Buffer.put_release_buffer_code(code, entry)
#code.putln("%s = 0;" % entry.cname)
code.putln("__Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}")
err_val = self.error_value()
exc_check = self.caller_will_check_exceptions()
if err_val is not None or exc_check:
# TODO: Fix exception tracing (though currently unused by cProfile).
# code.globalstate.use_utility_code(get_exception_tuple_utility_code)
# code.put_trace_exception()
code.put_add_traceback(self.entry.qualified_name)
else:
warning(self.entry.pos, "Unraisable exception in function '%s'." \
% self.entry.qualified_name, 0)
format_tuple = (
self.entry.qualified_name,
Naming.clineno_cname,
Naming.lineno_cname,
Naming.filename_cname,
)
code.putln(
'__Pyx_WriteUnraisable("%s", %s, %s, %s);' % format_tuple)
env.use_utility_code(unraisable_exception_utility_code)
env.use_utility_code(restore_exception_utility_code)
default_retval = self.return_type.default_value
if err_val is None and default_retval:
err_val = default_retval
if err_val is not None:
code.putln("%s = %s;" % (Naming.retval_cname, err_val))
if is_getbuffer_slot:
self.getbuffer_error_cleanup(code)
# If we are using the non-error cleanup section we should
# jump past it if we have an error. The if-test below determines
# whether this section is used.
if buffers_present or is_getbuffer_slot:
code.put_goto(code.return_from_error_cleanup_label)
# ----- Non-error return cleanup
code.put_label(code.return_label)
for entry in lenv.buffer_entries:
if entry.used:
Buffer.put_release_buffer_code(code, entry)
if is_getbuffer_slot:
self.getbuffer_normal_cleanup(code)
# ----- Return cleanup for both error and no-error return
code.put_label(code.return_from_error_cleanup_label)
for entry in lenv.var_entries:
if entry.type.is_pyobject:
if entry.used and not entry.in_closure:
code.put_var_decref(entry)
# Decref any increfed args
for entry in lenv.arg_entries:
if entry.type.is_pyobject:
if (acquire_gil or entry.assignments) and not entry.in_closure:
code.put_var_decref(entry)
if self.needs_closure:
code.put_decref(Naming.cur_scope_cname, lenv.scope_class.type)
# ----- Return
# This code is duplicated in ModuleNode.generate_module_init_func
if not lenv.nogil:
default_retval = self.return_type.default_value
err_val = self.error_value()
if err_val is None and default_retval:
err_val = default_retval
if self.return_type.is_pyobject:
code.put_xgiveref(self.return_type.as_pyobject(Naming.retval_cname))
if self.entry.is_special and self.entry.name == "__hash__":
# Returning -1 for __hash__ is supposed to signal an error
# We do as Python instances do and coerce -1 into -2.
code.putln("if (unlikely(%s == -1) && !PyErr_Occurred()) %s = -2;" % (
Naming.retval_cname, Naming.retval_cname))
if profile:
if self.return_type.is_pyobject:
code.put_trace_return(Naming.retval_cname)
else:
code.put_trace_return("Py_None")
if not lenv.nogil:
# GIL-holding function
code.put_finish_refcount_context()
if acquire_gil or acquire_gil_for_var_decls_only:
code.put_release_ensured_gil()
if not self.return_type.is_void:
code.putln("return %s;" % Naming.retval_cname)
code.putln("}")
if preprocessor_guard:
code.putln("#endif /*!(%s)*/" % preprocessor_guard)
# ----- Go back and insert temp variable declarations
tempvardecl_code.put_temp_declarations(code.funcstate)
if code.funcstate.should_declare_error_indicator:
# Initialize these variables to shut up compiler warnings
tempvardecl_code.putln("int %s = 0;" % Naming.lineno_cname)
tempvardecl_code.putln("const char *%s = NULL;" %
Naming.filename_cname)
if code.c_line_in_traceback:
tempvardecl_code.putln("int %s = 0;" % Naming.clineno_cname)
# ----- Python version
code.exit_cfunc_scope()
if self.py_func:
self.py_func.generate_function_definitions(env, code)
self.generate_wrapper_functions(code)
def declare_argument(self, env, arg):
if arg.type.is_void:
error(arg.pos, "Invalid use of 'void'")
elif not arg.type.is_complete() and not arg.type.is_array:
error(arg.pos,
"Argument type '%s' is incomplete" % arg.type)
return env.declare_arg(arg.name, arg.type, arg.pos)
def generate_arg_type_test(self, arg, code):
# Generate type test for one argument.
if arg.type.typeobj_is_available():
code.globalstate.use_utility_code(arg_type_test_utility_code)
typeptr_cname = arg.type.typeptr_cname
arg_code = "((PyObject *)%s)" % arg.entry.cname
code.putln(
'if (unlikely(!__Pyx_ArgTypeTest(%s, %s, %d, "%s", %s))) %s' % (
arg_code,
typeptr_cname,
arg.accept_none,
arg.name,
arg.type.is_builtin_type,
code.error_goto(arg.pos)))
else:
error(arg.pos, "Cannot test type of extern C class "
"without type object name specification")
def generate_arg_none_check(self, arg, code):
# Generate None check for one argument.
code.putln('if (unlikely(((PyObject *)%s) == Py_None)) {' % arg.entry.cname)
code.putln('''PyErr_Format(PyExc_TypeError, "Argument '%s' must not be None"); %s''' % (
arg.name,
code.error_goto(arg.pos)))
code.putln('}')
def generate_wrapper_functions(self, code):
pass
def generate_execution_code(self, code):
# Evaluate and store argument default values
for arg in self.args:
default = arg.default
if default:
if not default.is_literal:
default.generate_evaluation_code(code)
default.make_owned_reference(code)
result = default.result_as(arg.type)
code.putln(
"%s = %s;" % (
arg.calculate_default_value_code(code),
result))
if arg.type.is_pyobject:
code.put_giveref(default.result())
default.generate_post_assignment_code(code)
default.free_temps(code)
# For Python class methods, create and store function object
if self.assmt:
self.assmt.generate_execution_code(code)
#
# Special code for the __getbuffer__ function
#
def getbuffer_init(self, code):
info = self.local_scope.arg_entries[1].cname
# Python 3.0 betas have a bug in memoryview which makes it call
# getbuffer with a NULL parameter. For now we work around this;
# the following block should be removed when this bug is fixed.
code.putln("if (%s != NULL) {" % info)
code.putln("%s->obj = Py_None; __Pyx_INCREF(Py_None);" % info)
code.put_giveref("%s->obj" % info) # Do not refnanny object within structs
code.putln("}")
def getbuffer_error_cleanup(self, code):
info = self.local_scope.arg_entries[1].cname
code.putln("if (%s != NULL && %s->obj != NULL) {"
% (info, info))
code.put_gotref("%s->obj" % info)
code.putln("__Pyx_DECREF(%s->obj); %s->obj = NULL;"
% (info, info))
code.putln("}")
def getbuffer_normal_cleanup(self, code):
info = self.local_scope.arg_entries[1].cname
code.putln("if (%s != NULL && %s->obj == Py_None) {" % (info, info))
code.put_gotref("Py_None")
code.putln("__Pyx_DECREF(Py_None); %s->obj = NULL;" % info)
code.putln("}")
class CFuncDefNode(FuncDefNode):
# C function definition.
#
# modifiers ['inline']
# visibility 'private' or 'public' or 'extern'
# base_type CBaseTypeNode
# declarator CDeclaratorNode
# body StatListNode
# api boolean
# decorators [DecoratorNode] list of decorators
#
# with_gil boolean Acquire GIL around body
# type CFuncType
# py_func wrapper for calling from Python
# overridable whether or not this is a cpdef function
# inline_in_pxd whether this is an inline function in a pxd file
child_attrs = ["base_type", "declarator", "body", "py_func"]
inline_in_pxd = False
decorators = None
directive_locals = None
def unqualified_name(self):
return self.entry.name
def analyse_declarations(self, env):
if self.directive_locals is None:
self.directive_locals = {}
self.directive_locals.update(env.directives['locals'])
base_type = self.base_type.analyse(env)
# The 2 here is because we need both function and argument names.
if isinstance(self.declarator, CFuncDeclaratorNode):
name_declarator, type = self.declarator.analyse(base_type, env,
nonempty = 2 * (self.body is not None),
directive_locals = self.directive_locals)
else:
name_declarator, type = self.declarator.analyse(base_type, env, nonempty = 2 * (self.body is not None))
if not type.is_cfunction:
error(self.pos,
"Suite attached to non-function declaration")
# Remember the actual type according to the function header
# written here, because the type in the symbol table entry
# may be different if we're overriding a C method inherited
# from the base type of an extension type.
self.type = type
type.is_overridable = self.overridable
declarator = self.declarator
while not hasattr(declarator, 'args'):
declarator = declarator.base
self.args = declarator.args
for formal_arg, type_arg in zip(self.args, type.args):
self.align_argument_type(env, type_arg)
formal_arg.type = type_arg.type
formal_arg.name = type_arg.name
formal_arg.cname = type_arg.cname
if type_arg.type.is_buffer and 'inline' in self.modifiers:
warning(formal_arg.pos, "Buffer unpacking not optimized away.", 1)
name = name_declarator.name
cname = name_declarator.cname
self.entry = env.declare_cfunction(
name, type, self.pos,
cname = cname, visibility = self.visibility, api = self.api,
defining = self.body is not None, modifiers = self.modifiers)
self.entry.inline_func_in_pxd = self.inline_in_pxd
self.return_type = type.return_type
if self.return_type.is_array and self.visibility != 'extern':
error(self.pos,
"Function cannot return an array")
if self.overridable and not env.is_module_scope:
if len(self.args) < 1 or not self.args[0].type.is_pyobject:
# An error will be produced in the cdef function
self.overridable = False
if self.overridable:
import ExprNodes
py_func_body = self.call_self_node(is_module_scope = env.is_module_scope)
self.py_func = DefNode(pos = self.pos,
name = self.entry.name,
args = self.args,
star_arg = None,
starstar_arg = None,
doc = self.doc,
body = py_func_body,
is_wrapper = 1)
self.py_func.is_module_scope = env.is_module_scope
self.py_func.analyse_declarations(env)
self.entry.as_variable = self.py_func.entry
# Reset the scope entry to the above cfunction
env.entries[name] = self.entry
if not env.is_module_scope or Options.lookup_module_cpdef:
self.override = OverrideCheckNode(self.pos, py_func = self.py_func)
self.body = StatListNode(self.pos, stats=[self.override, self.body])
self.create_local_scope(env)
def call_self_node(self, omit_optional_args=0, is_module_scope=0):
import ExprNodes
args = self.type.args
if omit_optional_args:
args = args[:len(args) - self.type.optional_arg_count]
arg_names = [arg.name for arg in args]
if is_module_scope:
cfunc = ExprNodes.NameNode(self.pos, name=self.entry.name)
else:
self_arg = ExprNodes.NameNode(self.pos, name=arg_names[0])
cfunc = ExprNodes.AttributeNode(self.pos, obj=self_arg, attribute=self.entry.name)
skip_dispatch = not is_module_scope or Options.lookup_module_cpdef
c_call = ExprNodes.SimpleCallNode(self.pos, function=cfunc, args=[ExprNodes.NameNode(self.pos, name=n) for n in arg_names[1-is_module_scope:]], wrapper_call=skip_dispatch)
return ReturnStatNode(pos=self.pos, return_type=PyrexTypes.py_object_type, value=c_call)
def declare_arguments(self, env):
for arg in self.type.args:
if not arg.name:
error(arg.pos, "Missing argument name")
self.declare_argument(env, arg)
def need_gil_acquisition(self, lenv):
return self.type.with_gil
def nogil_check(self, env):
type = self.type
with_gil = type.with_gil
if type.nogil and not with_gil:
if type.return_type.is_pyobject:
error(self.pos,
"Function with Python return type cannot be declared nogil")
for entry in self.local_scope.var_entries:
if entry.type.is_pyobject and not entry.in_with_gil_block:
error(self.pos, "Function declared nogil has Python locals or temporaries")
def analyse_expressions(self, env):
self.local_scope.directives = env.directives
if self.py_func is not None:
# this will also analyse the default values
self.py_func.analyse_expressions(env)
else:
self.analyse_default_values(env)
self.acquire_gil = self.need_gil_acquisition(self.local_scope)
def needs_assignment_synthesis(self, env, code=None):
return False
def generate_function_header(self, code, with_pymethdef, with_opt_args = 1, with_dispatch = 1, cname = None):
arg_decls = []
type = self.type
for arg in type.args[:len(type.args)-type.optional_arg_count]:
arg_decls.append(arg.declaration_code())
if with_dispatch and self.overridable:
arg_decls.append(PyrexTypes.c_int_type.declaration_code(Naming.skip_dispatch_cname))
if type.optional_arg_count and with_opt_args:
arg_decls.append(type.op_arg_struct.declaration_code(Naming.optional_args_cname))
if type.has_varargs:
arg_decls.append("...")
if not arg_decls:
arg_decls = ["void"]
if cname is None:
cname = self.entry.func_cname
entity = type.function_header_code(cname, ', '.join(arg_decls))
if self.entry.visibility == 'private':
storage_class = "static "
else:
storage_class = ""
dll_linkage = None
modifiers = ""
if 'inline' in self.modifiers:
self.modifiers[self.modifiers.index('inline')] = 'cython_inline'
if self.modifiers:
modifiers = "%s " % ' '.join(self.modifiers).upper()
header = self.return_type.declaration_code(entity, dll_linkage=dll_linkage)
#print (storage_class, modifiers, header)
code.putln("%s%s%s {" % (storage_class, modifiers, header))
def generate_argument_declarations(self, env, code):
for arg in self.args:
if arg.default:
result = arg.calculate_default_value_code(code)
code.putln('%s = %s;' % (
arg.type.declaration_code(arg.cname), result))
def generate_keyword_list(self, code):
pass
def generate_argument_parsing_code(self, env, code):
i = 0
if self.type.optional_arg_count:
code.putln('if (%s) {' % Naming.optional_args_cname)
for arg in self.args:
if arg.default:
code.putln('if (%s->%sn > %s) {' % (Naming.optional_args_cname, Naming.pyrex_prefix, i))
declarator = arg.declarator
while not hasattr(declarator, 'name'):
declarator = declarator.base
code.putln('%s = %s->%s;' % (arg.cname, Naming.optional_args_cname, self.type.opt_arg_cname(declarator.name)))
i += 1
for _ in range(self.type.optional_arg_count):
code.putln('}')
code.putln('}')
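# Rough shape of the C code emitted above for a function with two optional
# C arguments (names simplified; the real ones come from
# Naming.optional_args_cname and type.opt_arg_cname()):
#
#   if (__pyx_optional_args) {
#     if (__pyx_optional_args->__pyx_n > 0) {
#       a = __pyx_optional_args->a;
#       if (__pyx_optional_args->__pyx_n > 1) {
#         b = __pyx_optional_args->b;
#       }
#     }
#   }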
def generate_argument_conversion_code(self, code):
pass
def generate_argument_type_tests(self, code):
# Generate type tests for args whose type in a parent
# class is a supertype of the declared type.
for arg in self.type.args:
if arg.needs_type_test:
self.generate_arg_type_test(arg, code)
elif arg.type.is_pyobject and not arg.accept_none:
self.generate_arg_none_check(arg, code)
def error_value(self):
if self.return_type.is_pyobject:
return "0"
else:
#return None
return self.entry.type.exception_value
def caller_will_check_exceptions(self):
return self.entry.type.exception_check
def generate_wrapper_functions(self, code):
# If the C signature of a function has changed, we need to generate
# wrappers to put in the slots here.
k = 0
entry = self.entry
func_type = entry.type
while entry.prev_entry is not None:
k += 1
entry = entry.prev_entry
entry.func_cname = "%s%swrap_%s" % (self.entry.func_cname, Naming.pyrex_prefix, k)
code.putln()
self.generate_function_header(code,
0,
with_dispatch = entry.type.is_overridable,
with_opt_args = entry.type.optional_arg_count,
cname = entry.func_cname)
if not self.return_type.is_void:
code.put('return ')
args = self.type.args
arglist = [arg.cname for arg in args[:len(args)-self.type.optional_arg_count]]
if entry.type.is_overridable:
arglist.append(Naming.skip_dispatch_cname)
elif func_type.is_overridable:
arglist.append('0')
if entry.type.optional_arg_count:
arglist.append(Naming.optional_args_cname)
elif func_type.optional_arg_count:
arglist.append('NULL')
code.putln('%s(%s);' % (self.entry.func_cname, ', '.join(arglist)))
code.putln('}')
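# Hedged example for generate_wrapper_functions() above: if a base class
# declared "cpdef f(self, int x)" and a subclass redeclares it with an
# extra optional argument, the older C signature may still be referenced
# from existing slots, so a small forwarding wrapper with the previous
# signature is emitted here; it calls the new implementation, passing 0
# for the dispatch flag and NULL for the optional-argument struct when
# the older signature lacks them.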
class PyArgDeclNode(Node):
# Argument which must be a Python object (used
# for * and ** arguments).
#
# name string
# entry Symtab.Entry
# annotation ExprNode or None Py3 argument annotation
child_attrs = []
def generate_function_definitions(self, env, code):
self.entry.generate_function_definitions(env, code)
class DecoratorNode(Node):
# A decorator
#
# decorator NameNode or CallNode or AttributeNode
child_attrs = ['decorator']
class DefNode(FuncDefNode):
# A Python function definition.
#
# name string the Python name of the function
# lambda_name string the internal name of a lambda 'function'
# decorators [DecoratorNode] list of decorators
# args [CArgDeclNode] formal arguments
# doc EncodedString or None
# body StatListNode
# return_type_annotation
# ExprNode or None the Py3 return type annotation
#
# The following subnode is constructed internally
# when the def statement is inside a Python class definition.
#
# assmt AssignmentNode Function construction/assignment
child_attrs = ["args", "star_arg", "starstar_arg", "body", "decorators"]
lambda_name = None
assmt = None
num_kwonly_args = 0
num_required_kw_args = 0
reqd_kw_flags_cname = "0"
is_wrapper = 0
no_assignment_synthesis = 0
decorators = None
return_type_annotation = None
entry = None
acquire_gil = 0
self_in_stararg = 0
doc = None
def __init__(self, pos, **kwds):
FuncDefNode.__init__(self, pos, **kwds)
k = rk = r = 0
for arg in self.args:
if arg.kw_only:
k += 1
if not arg.default:
rk += 1
if not arg.default:
r += 1
self.num_kwonly_args = k
self.num_required_kw_args = rk
self.num_required_args = r
def as_cfunction(self, cfunc=None, scope=None, overridable=True):
if self.star_arg:
error(self.star_arg.pos, "cdef function cannot have star argument")
if self.starstar_arg:
error(self.starstar_arg.pos, "cdef function cannot have starstar argument")
if cfunc is None:
cfunc_args = []
for formal_arg in self.args:
name_declarator, type = formal_arg.analyse(scope, nonempty=1)
cfunc_args.append(PyrexTypes.CFuncTypeArg(name = name_declarator.name,
cname = None,
type = py_object_type,
pos = formal_arg.pos))
cfunc_type = PyrexTypes.CFuncType(return_type = py_object_type,
args = cfunc_args,
has_varargs = False,
exception_value = None,
exception_check = False,
nogil = False,
with_gil = False,
is_overridable = overridable)
cfunc = CVarDefNode(self.pos, type=cfunc_type)
else:
if scope is None:
scope = cfunc.scope
cfunc_type = cfunc.type
if len(self.args) != len(cfunc_type.args) or cfunc_type.has_varargs:
error(self.pos, "wrong number of arguments")
error(cfunc.pos, "previous declaration here")
for i, (formal_arg, type_arg) in enumerate(zip(self.args, cfunc_type.args)):
name_declarator, type = formal_arg.analyse(scope, nonempty=1,
is_self_arg = (i == 0 and scope.is_c_class_scope))
if type is None or type is PyrexTypes.py_object_type:
formal_arg.type = type_arg.type
formal_arg.name_declarator = name_declarator
import ExprNodes
if cfunc_type.exception_value is None:
exception_value = None
else:
exception_value = ExprNodes.ConstNode(self.pos, value=cfunc_type.exception_value, type=cfunc_type.return_type)
declarator = CFuncDeclaratorNode(self.pos,
base = CNameDeclaratorNode(self.pos, name=self.name, cname=None),
args = self.args,
has_varargs = False,
exception_check = cfunc_type.exception_check,
exception_value = exception_value,
with_gil = cfunc_type.with_gil,
nogil = cfunc_type.nogil)
return CFuncDefNode(self.pos,
modifiers = [],
base_type = CAnalysedBaseTypeNode(self.pos, type=cfunc_type.return_type),
declarator = declarator,
body = self.body,
doc = self.doc,
overridable = cfunc_type.is_overridable,
type = cfunc_type,
with_gil = cfunc_type.with_gil,
nogil = cfunc_type.nogil,
visibility = 'private',
api = False,
directive_locals = getattr(cfunc, 'directive_locals', {}))
def is_cdef_func_compatible(self):
"""Determines if the function's signature is compatible with a
cdef function. This can be used before calling
.as_cfunction() to see if that will be successful.
"""
if self.needs_closure:
return False
if self.star_arg or self.starstar_arg:
return False
return True
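# Hedged usage note: callers that want to turn a def into a cdef/cpdef
# function are expected to test is_cdef_func_compatible() first and only
# then invoke as_cfunction(); closures and */** arguments keep the
# function as a plain Python def.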
def analyse_declarations(self, env):
self.is_classmethod = self.is_staticmethod = False
if self.decorators:
for decorator in self.decorators:
func = decorator.decorator
if func.is_name:
self.is_classmethod |= func.name == 'classmethod'
self.is_staticmethod |= func.name == 'staticmethod'
if self.is_classmethod and env.lookup_here('classmethod'):
# classmethod() was overridden - not much we can do here ...
self.is_classmethod = False
if self.is_staticmethod and env.lookup_here('staticmethod'):
# staticmethod() was overridden - not much we can do here ...
self.is_staticmethod = False
if self.name == '__new__' and env.is_py_class_scope:
self.is_staticmethod = 1
self.analyse_argument_types(env)
if self.name == '<lambda>':
self.declare_lambda_function(env)
else:
self.declare_pyfunction(env)
self.analyse_signature(env)
self.return_type = self.entry.signature.return_type()
self.create_local_scope(env)
def analyse_argument_types(self, env):
directive_locals = self.directive_locals = env.directives['locals']
allow_none_for_extension_args = env.directives['allow_none_for_extension_args']
for arg in self.args:
if hasattr(arg, 'name'):
name_declarator = None
else:
base_type = arg.base_type.analyse(env)
name_declarator, type = \
arg.declarator.analyse(base_type, env)
arg.name = name_declarator.name
arg.type = type
self.align_argument_type(env, arg)
if name_declarator and name_declarator.cname:
error(self.pos,
"Python function argument cannot have C name specification")
arg.type = arg.type.as_argument_type()
arg.hdr_type = None
arg.needs_conversion = 0
arg.needs_type_test = 0
arg.is_generic = 1
if arg.type.is_pyobject:
if arg.or_none:
arg.accept_none = True
elif arg.not_none:
arg.accept_none = False
elif arg.type.is_extension_type or arg.type.is_builtin_type:
if arg.default and arg.default.constant_result is None:
# special case: def func(MyType obj = None)
arg.accept_none = True
else:
# default depends on compiler directive
arg.accept_none = allow_none_for_extension_args
else:
# probably just a plain 'object'
arg.accept_none = True
else:
arg.accept_none = True # won't be used, but must be there
if arg.not_none:
error(arg.pos, "Only Python type arguments can have 'not None'")
if arg.or_none:
error(arg.pos, "Only Python type arguments can have 'or None'")
def analyse_signature(self, env):
if self.entry.is_special:
if self.decorators:
error(self.pos, "special functions of cdef classes cannot have decorators")
self.entry.trivial_signature = len(self.args) == 1 and not (self.star_arg or self.starstar_arg)
elif not env.directives['always_allow_keywords'] and not (self.star_arg or self.starstar_arg):
# Use the simpler calling signature for zero- and one-argument functions.
if self.entry.signature is TypeSlots.pyfunction_signature:
if len(self.args) == 0:
self.entry.signature = TypeSlots.pyfunction_noargs
elif len(self.args) == 1:
if self.args[0].default is None and not self.args[0].kw_only:
self.entry.signature = TypeSlots.pyfunction_onearg
elif self.entry.signature is TypeSlots.pymethod_signature:
if len(self.args) == 1:
self.entry.signature = TypeSlots.unaryfunc
elif len(self.args) == 2:
if self.args[1].default is None and not self.args[1].kw_only:
self.entry.signature = TypeSlots.ibinaryfunc
sig = self.entry.signature
nfixed = sig.num_fixed_args()
if sig is TypeSlots.pymethod_signature and nfixed == 1 \
and len(self.args) == 0 and self.star_arg:
# this is the only case where a diverging number of
# arguments is not an error - when we have no explicit
# 'self' parameter as in method(*args)
sig = self.entry.signature = TypeSlots.pyfunction_signature # self is not 'really' used
self.self_in_stararg = 1
nfixed = 0
for i in range(min(nfixed, len(self.args))):
arg = self.args[i]
arg.is_generic = 0
if sig.is_self_arg(i) and not self.is_staticmethod:
if self.is_classmethod:
arg.is_type_arg = 1
arg.hdr_type = arg.type = Builtin.type_type
else:
arg.is_self_arg = 1
arg.hdr_type = arg.type = env.parent_type
arg.needs_conversion = 0
else:
arg.hdr_type = sig.fixed_arg_type(i)
if not arg.type.same_as(arg.hdr_type):
if arg.hdr_type.is_pyobject and arg.type.is_pyobject:
arg.needs_type_test = 1
else:
arg.needs_conversion = 1
if arg.needs_conversion:
arg.hdr_cname = Naming.arg_prefix + arg.name
else:
arg.hdr_cname = Naming.var_prefix + arg.name
if nfixed > len(self.args):
self.bad_signature()
return
elif nfixed < len(self.args):
if not sig.has_generic_args:
self.bad_signature()
for arg in self.args:
if arg.is_generic and \
(arg.type.is_extension_type or arg.type.is_builtin_type):
arg.needs_type_test = 1
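# Hedged illustration of the signature optimisation above: with
# 'always_allow_keywords' disabled, a plain "def f(): ..." or
# "def f(x): ..." can be registered with the simpler noargs/onearg
# signatures (roughly the METH_NOARGS / METH_O calling conventions),
# skipping the generic args/kwargs tuple parsing; defaults, keyword-only
# arguments, *args or **kwargs keep the generic signature.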
def bad_signature(self):
sig = self.entry.signature
expected_str = "%d" % sig.num_fixed_args()
if sig.has_generic_args:
expected_str = expected_str + " or more"
name = self.name
if name.startswith("__") and name.endswith("__"):
desc = "Special method"
else:
desc = "Method"
error(self.pos,
"%s %s has wrong number of arguments "
"(%d declared, %s expected)" % (
desc, self.name, len(self.args), expected_str))
def signature_has_nongeneric_args(self):
argcount = len(self.args)
if argcount == 0 or (
argcount == 1 and (self.args[0].is_self_arg or
self.args[0].is_type_arg)):
return 0
return 1
def signature_has_generic_args(self):
return self.entry.signature.has_generic_args
def declare_pyfunction(self, env):
#print "DefNode.declare_pyfunction:", self.name, "in", env ###
name = self.name
entry = env.lookup_here(name)
if entry and entry.type.is_cfunction and not self.is_wrapper:
warning(self.pos, "Overriding cdef method with def method.", 5)
entry = env.declare_pyfunction(name, self.pos, allow_redefine=not self.is_wrapper)
self.entry = entry
prefix = env.next_id(env.scope_prefix)
entry.func_cname = \
Naming.pyfunc_prefix + prefix + name
entry.pymethdef_cname = \
Naming.pymethdef_prefix + prefix + name
if Options.docstrings:
entry.doc = embed_position(self.pos, self.doc)
entry.doc_cname = \
Naming.funcdoc_prefix + prefix + name
if entry.is_special:
if entry.name in TypeSlots.invisible or not entry.doc or (entry.name == '__getattr__' and env.directives['fast_getattr']):
entry.wrapperbase_cname = None
else:
entry.wrapperbase_cname = Naming.wrapperbase_prefix + prefix + name
else:
entry.doc = None
def declare_lambda_function(self, env):
entry = env.declare_lambda_function(self.lambda_name, self.pos)
entry.doc = None
self.entry = entry
def declare_arguments(self, env):
for arg in self.args:
if not arg.name:
error(arg.pos, "Missing argument name")
if arg.needs_conversion:
arg.entry = env.declare_var(arg.name, arg.type, arg.pos)
if arg.type.is_pyobject:
arg.entry.init = "0"
else:
arg.entry = self.declare_argument(env, arg)
arg.entry.used = 1
arg.entry.is_self_arg = arg.is_self_arg
if arg.hdr_type:
if arg.is_self_arg or arg.is_type_arg or \
(arg.type.is_extension_type and not arg.hdr_type.is_extension_type):
arg.entry.is_declared_generic = 1
self.declare_python_arg(env, self.star_arg)
self.declare_python_arg(env, self.starstar_arg)
def declare_python_arg(self, env, arg):
if arg:
if env.directives['infer_types'] != False:
type = PyrexTypes.unspecified_type
else:
type = py_object_type
entry = env.declare_var(arg.name, type, arg.pos)
entry.used = 1
entry.init = "0"
entry.xdecref_cleanup = 1
arg.entry = entry
def analyse_expressions(self, env):
self.local_scope.directives = env.directives
self.analyse_default_values(env)
if self.needs_assignment_synthesis(env):
# Shouldn't we be doing this at the module level too?
self.synthesize_assignment_node(env)
def needs_assignment_synthesis(self, env, code=None):
if self.no_assignment_synthesis:
return False
# Should enable for module level as well, that will require more testing...
if self.entry.is_anonymous:
return True
if env.is_module_scope:
if code is None:
return env.directives['binding']
else:
return code.globalstate.directives['binding']
return env.is_py_class_scope or env.is_closure_scope
def synthesize_assignment_node(self, env):
import ExprNodes
genv = env
while genv.is_py_class_scope or genv.is_c_class_scope:
genv = genv.outer_scope
if genv.is_closure_scope:
rhs = ExprNodes.InnerFunctionNode(
self.pos, pymethdef_cname = self.entry.pymethdef_cname)
else:
rhs = ExprNodes.PyCFunctionNode(
self.pos, pymethdef_cname = self.entry.pymethdef_cname, binding = env.directives['binding'])
if env.is_py_class_scope:
if not self.is_staticmethod and not self.is_classmethod:
rhs.binding = True
else:
rhs.binding = False
self.assmt = SingleAssignmentNode(self.pos,
lhs = ExprNodes.NameNode(self.pos, name = self.name),
rhs = rhs)
self.assmt.analyse_declarations(env)
self.assmt.analyse_expressions(env)
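# Hedged sketch: for a def inside a Python class body or a closure, the
# synthesized assignment corresponds to "name = <new function object>",
# i.e. a SingleAssignmentNode binding a PyCFunctionNode (or an
# InnerFunctionNode inside closures) to the function name; at module
# scope this only happens when the 'binding' directive is enabled.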
def generate_function_header(self, code, with_pymethdef, proto_only=0):
arg_code_list = []
sig = self.entry.signature
if sig.has_dummy_arg or self.self_in_stararg:
arg_code_list.append(
"PyObject *%s" % Naming.self_cname)
for arg in self.args:
if not arg.is_generic:
if arg.is_self_arg or arg.is_type_arg:
arg_code_list.append("PyObject *%s" % arg.hdr_cname)
else:
arg_code_list.append(
arg.hdr_type.declaration_code(arg.hdr_cname))
if not self.entry.is_special and sig.method_flags() == [TypeSlots.method_noargs]:
arg_code_list.append("CYTHON_UNUSED PyObject *unused")
if (self.entry.scope.is_c_class_scope and self.entry.name == "__ipow__"):
arg_code_list.append("CYTHON_UNUSED PyObject *unused")
if sig.has_generic_args:
arg_code_list.append(
"PyObject *%s, PyObject *%s"
% (Naming.args_cname, Naming.kwds_cname))
arg_code = ", ".join(arg_code_list)
dc = self.return_type.declaration_code(self.entry.func_cname)
mf = " ".join(self.modifiers).upper()
if mf: mf += " "
header = "static %s%s(%s)" % (mf, dc, arg_code)
code.putln("%s; /*proto*/" % header)
if proto_only:
return
if (Options.docstrings and self.entry.doc and
not self.entry.scope.is_property_scope and
(not self.entry.is_special or self.entry.wrapperbase_cname)):
docstr = self.entry.doc
if docstr.is_unicode:
docstr = docstr.utf8encode()
code.putln(
'static char %s[] = "%s";' % (
self.entry.doc_cname,
split_string_literal(escape_byte_string(docstr))))
if self.entry.is_special:
code.putln(
"struct wrapperbase %s;" % self.entry.wrapperbase_cname)
if with_pymethdef:
code.put(
"static PyMethodDef %s = " %
self.entry.pymethdef_cname)
code.put_pymethoddef(self.entry, ";", allow_skip=False)
code.putln("%s {" % header)
def generate_argument_declarations(self, env, code):
for arg in self.args:
if arg.is_generic: # or arg.needs_conversion:
if arg.needs_conversion:
code.putln("PyObject *%s = 0;" % arg.hdr_cname)
elif not arg.entry.in_closure:
code.put_var_declaration(arg.entry)
def generate_keyword_list(self, code):
if self.signature_has_generic_args() and \
self.signature_has_nongeneric_args():
code.put(
"static PyObject **%s[] = {" %
Naming.pykwdlist_cname)
for arg in self.args:
if arg.is_generic:
pystring_cname = code.intern_identifier(arg.name)
code.put('&%s,' % pystring_cname)
code.putln("0};")
def generate_argument_parsing_code(self, env, code):
# Generate fast equivalent of PyArg_ParseTuple call for
# generic arguments, if any, including args/kwargs
if self.entry.signature.has_dummy_arg and not self.self_in_stararg:
# get rid of unused argument warning
code.putln("%s = %s;" % (Naming.self_cname, Naming.self_cname))
old_error_label = code.new_error_label()
our_error_label = code.error_label
end_label = code.new_label("argument_unpacking_done")
has_kwonly_args = self.num_kwonly_args > 0
has_star_or_kw_args = self.star_arg is not None \
or self.starstar_arg is not None or has_kwonly_args
for arg in self.args:
if not arg.type.is_pyobject:
done = arg.type.create_from_py_utility_code(env)
if not done: pass # will fail later
if not self.signature_has_generic_args():
if has_star_or_kw_args:
error(self.pos, "This method cannot have * or keyword arguments")
self.generate_argument_conversion_code(code)
elif not self.signature_has_nongeneric_args():
# func(*args) or func(**kw) or func(*args, **kw)
self.generate_stararg_copy_code(code)
else:
positional_args = []
kw_only_args = []
for arg in self.args:
arg_entry = arg.entry
if arg.is_generic:
if arg.default:
if not arg.is_self_arg and not arg.is_type_arg:
if arg.kw_only:
kw_only_args.append(arg)
else:
positional_args.append(arg)
elif arg.kw_only:
kw_only_args.append(arg)
elif not arg.is_self_arg and not arg.is_type_arg:
positional_args.append(arg)
self.generate_tuple_and_keyword_parsing_code(
positional_args, kw_only_args, end_label, code)
code.error_label = old_error_label
if code.label_used(our_error_label):
if not code.label_used(end_label):
code.put_goto(end_label)
code.put_label(our_error_label)
if has_star_or_kw_args:
self.generate_arg_decref(self.star_arg, code)
if self.starstar_arg:
if self.starstar_arg.entry.xdecref_cleanup:
code.put_var_xdecref_clear(self.starstar_arg.entry)
else:
code.put_var_decref_clear(self.starstar_arg.entry)
code.put_add_traceback(self.entry.qualified_name)
# The arguments are put into the closure one after the
# other, so when type errors are found, all references in
# the closure instance must be properly ref-counted to
# facilitate generic closure instance deallocation. In
# the case of an argument type error, it's best to just
# DECREF+clear the already handled references, as this
# frees their references as early as possible.
for arg in self.args:
if arg.type.is_pyobject and arg.entry.in_closure:
code.put_var_xdecref_clear(arg.entry)
if self.needs_closure:
code.put_decref(Naming.cur_scope_cname, self.local_scope.scope_class.type)
code.put_finish_refcount_context()
code.putln("return %s;" % self.error_value())
if code.label_used(end_label):
code.put_label(end_label)
# fix refnanny view on closure variables here, instead of
# doing it separately for each arg parsing special case
if self.star_arg and self.star_arg.entry.in_closure:
code.put_var_giveref(self.star_arg.entry)
if self.starstar_arg and self.starstar_arg.entry.in_closure:
code.put_var_giveref(self.starstar_arg.entry)
for arg in self.args:
if arg.type.is_pyobject and arg.entry.in_closure:
code.put_var_giveref(arg.entry)
def generate_arg_assignment(self, arg, item, code):
if arg.type.is_pyobject:
if arg.is_generic:
item = PyrexTypes.typecast(arg.type, PyrexTypes.py_object_type, item)
entry = arg.entry
if entry.in_closure:
code.put_incref(item, PyrexTypes.py_object_type)
code.putln("%s = %s;" % (entry.cname, item))
else:
func = arg.type.from_py_function
if func:
code.putln("%s = %s(%s); %s" % (
arg.entry.cname,
func,
item,
code.error_goto_if(arg.type.error_condition(arg.entry.cname), arg.pos)))
else:
error(arg.pos, "Cannot convert Python object argument to type '%s'" % arg.type)
def generate_arg_xdecref(self, arg, code):
if arg:
code.put_var_xdecref_clear(arg.entry)
def generate_arg_decref(self, arg, code):
if arg:
code.put_var_decref_clear(arg.entry)
def generate_stararg_copy_code(self, code):
if not self.star_arg:
code.globalstate.use_utility_code(raise_argtuple_invalid_utility_code)
code.putln("if (unlikely(PyTuple_GET_SIZE(%s) > 0)) {" %
Naming.args_cname)
code.put('__Pyx_RaiseArgtupleInvalid("%s", 1, 0, 0, PyTuple_GET_SIZE(%s)); return %s;' % (
self.name, Naming.args_cname, self.error_value()))
code.putln("}")
if self.starstar_arg:
if self.star_arg:
kwarg_check = "unlikely(%s)" % Naming.kwds_cname
else:
kwarg_check = "%s" % Naming.kwds_cname
else:
kwarg_check = "unlikely(%s) && unlikely(PyDict_Size(%s) > 0)" % (
Naming.kwds_cname, Naming.kwds_cname)
code.globalstate.use_utility_code(keyword_string_check_utility_code)
code.putln(
"if (%s && unlikely(!__Pyx_CheckKeywordStrings(%s, \"%s\", %d))) return %s;" % (
kwarg_check, Naming.kwds_cname, self.name,
bool(self.starstar_arg), self.error_value()))
if self.starstar_arg:
code.putln("%s = (%s) ? PyDict_Copy(%s) : PyDict_New();" % (
self.starstar_arg.entry.cname,
Naming.kwds_cname,
Naming.kwds_cname))
code.putln("if (unlikely(!%s)) return %s;" % (
self.starstar_arg.entry.cname, self.error_value()))
self.starstar_arg.entry.xdecref_cleanup = 0
code.put_gotref(self.starstar_arg.entry.cname)
if self.self_in_stararg:
# need to create a new tuple with 'self' inserted as first item
code.put("%s = PyTuple_New(PyTuple_GET_SIZE(%s)+1); if (unlikely(!%s)) " % (
self.star_arg.entry.cname,
Naming.args_cname,
self.star_arg.entry.cname))
if self.starstar_arg:
code.putln("{")
code.put_decref_clear(self.starstar_arg.entry.cname, py_object_type)
code.putln("return %s;" % self.error_value())
code.putln("}")
else:
code.putln("return %s;" % self.error_value())
code.put_gotref(self.star_arg.entry.cname)
code.put_incref(Naming.self_cname, py_object_type)
code.put_giveref(Naming.self_cname)
code.putln("PyTuple_SET_ITEM(%s, 0, %s);" % (
self.star_arg.entry.cname, Naming.self_cname))
temp = code.funcstate.allocate_temp(PyrexTypes.c_py_ssize_t_type, manage_ref=False)
code.putln("for (%s=0; %s < PyTuple_GET_SIZE(%s); %s++) {" % (
temp, temp, Naming.args_cname, temp))
code.putln("PyObject* item = PyTuple_GET_ITEM(%s, %s);" % (
Naming.args_cname, temp))
code.put_incref("item", py_object_type)
code.put_giveref("item")
code.putln("PyTuple_SET_ITEM(%s, %s+1, item);" % (
self.star_arg.entry.cname, temp))
code.putln("}")
code.funcstate.release_temp(temp)
self.star_arg.entry.xdecref_cleanup = 0
elif self.star_arg:
code.put_incref(Naming.args_cname, py_object_type)
code.putln("%s = %s;" % (
self.star_arg.entry.cname,
Naming.args_cname))
self.star_arg.entry.xdecref_cleanup = 0
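# Hedged summary of the fast path above: for "def f(*args, **kwargs)"
# with no other explicit parameters, no tuple unpacking is needed at all;
# *args either reuses the incoming args tuple (after an incref) or, when
# 'self' must be prepended (self_in_stararg), a new tuple of size
# len(args)+1 is built, and **kwargs becomes PyDict_Copy(kwds) or a fresh
# empty dict.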
def generate_tuple_and_keyword_parsing_code(self, positional_args,
kw_only_args, success_label, code):
argtuple_error_label = code.new_label("argtuple_error")
min_positional_args = self.num_required_args - self.num_required_kw_args
if len(self.args) > 0 and (self.args[0].is_self_arg or self.args[0].is_type_arg):
min_positional_args -= 1
max_positional_args = len(positional_args)
has_fixed_positional_count = not self.star_arg and \
min_positional_args == max_positional_args
if self.num_required_kw_args:
code.globalstate.use_utility_code(raise_keyword_required_utility_code)
if self.starstar_arg or self.star_arg:
self.generate_stararg_init_code(max_positional_args, code)
# --- optimised code when we receive keyword arguments
if self.num_required_kw_args:
likely_hint = "likely"
else:
likely_hint = "unlikely"
code.putln("if (%s(%s)) {" % (likely_hint, Naming.kwds_cname))
self.generate_keyword_unpacking_code(
min_positional_args, max_positional_args,
has_fixed_positional_count,
positional_args, kw_only_args, argtuple_error_label, code)
# --- optimised code when we do not receive any keyword arguments
if (self.num_required_kw_args and min_positional_args > 0) or min_positional_args == max_positional_args:
# Python raises arg tuple related errors first, so we must
# check the length here
if min_positional_args == max_positional_args and not self.star_arg:
compare = '!='
else:
compare = '<'
code.putln('} else if (PyTuple_GET_SIZE(%s) %s %d) {' % (
Naming.args_cname, compare, min_positional_args))
code.put_goto(argtuple_error_label)
if self.num_required_kw_args:
# pure error case: keywords required but not passed
if max_positional_args > min_positional_args and not self.star_arg:
code.putln('} else if (PyTuple_GET_SIZE(%s) > %d) {' % (
Naming.args_cname, max_positional_args))
code.put_goto(argtuple_error_label)
code.putln('} else {')
for i, arg in enumerate(kw_only_args):
if not arg.default:
pystring_cname = code.intern_identifier(arg.name)
# required keyword-only argument missing
code.put('__Pyx_RaiseKeywordRequired("%s", %s); ' % (
self.name,
pystring_cname))
code.putln(code.error_goto(self.pos))
break
elif min_positional_args == max_positional_args:
# parse the exact number of positional arguments from the
# args tuple
code.putln('} else {')
for i, arg in enumerate(positional_args):
item = "PyTuple_GET_ITEM(%s, %d)" % (Naming.args_cname, i)
self.generate_arg_assignment(arg, item, code)
self.generate_arg_default_assignments(code)
else:
# parse the positional arguments from the variable length
# args tuple
code.putln('} else {')
self.generate_arg_default_assignments(code)
code.putln('switch (PyTuple_GET_SIZE(%s)) {' % Naming.args_cname)
if self.star_arg:
code.putln('default:')
reversed_args = list(enumerate(positional_args))[::-1]
for i, arg in reversed_args:
if i >= min_positional_args-1:
if min_positional_args > 1:
code.putln('case %2d:' % (i+1)) # pure code beautification
else:
code.put('case %2d: ' % (i+1))
item = "PyTuple_GET_ITEM(%s, %d)" % (Naming.args_cname, i)
self.generate_arg_assignment(arg, item, code)
if min_positional_args == 0:
code.put('case 0: ')
code.putln('break;')
if self.star_arg:
if min_positional_args:
for i in range(min_positional_args-1, -1, -1):
code.putln('case %2d:' % i)
code.put_goto(argtuple_error_label)
else:
code.put('default: ')
code.put_goto(argtuple_error_label)
code.putln('}')
code.putln('}')
if code.label_used(argtuple_error_label):
code.put_goto(success_label)
code.put_label(argtuple_error_label)
code.globalstate.use_utility_code(raise_argtuple_invalid_utility_code)
code.put('__Pyx_RaiseArgtupleInvalid("%s", %d, %d, %d, PyTuple_GET_SIZE(%s)); ' % (
self.name, has_fixed_positional_count,
min_positional_args, max_positional_args,
Naming.args_cname))
code.putln(code.error_goto(self.pos))
def generate_arg_default_assignments(self, code):
for arg in self.args:
if arg.is_generic and arg.default:
code.putln(
"%s = %s;" % (
arg.entry.cname,
arg.calculate_default_value_code(code)))
def generate_stararg_init_code(self, max_positional_args, code):
if self.starstar_arg:
self.starstar_arg.entry.xdecref_cleanup = 0
code.putln('%s = PyDict_New(); if (unlikely(!%s)) return %s;' % (
self.starstar_arg.entry.cname,
self.starstar_arg.entry.cname,
self.error_value()))
code.put_gotref(self.starstar_arg.entry.cname)
if self.star_arg:
self.star_arg.entry.xdecref_cleanup = 0
code.putln('if (PyTuple_GET_SIZE(%s) > %d) {' % (
Naming.args_cname,
max_positional_args))
code.putln('%s = PyTuple_GetSlice(%s, %d, PyTuple_GET_SIZE(%s));' % (
self.star_arg.entry.cname, Naming.args_cname,
max_positional_args, Naming.args_cname))
code.putln("if (unlikely(!%s)) {" % self.star_arg.entry.cname)
if self.starstar_arg:
code.put_decref_clear(self.starstar_arg.entry.cname, py_object_type)
if self.needs_closure:
code.put_decref(Naming.cur_scope_cname, self.local_scope.scope_class.type)
code.put_finish_refcount_context()
code.putln('return %s;' % self.error_value())
code.putln('}')
code.put_gotref(self.star_arg.entry.cname)
code.putln('} else {')
code.put("%s = %s; " % (self.star_arg.entry.cname, Naming.empty_tuple))
code.put_incref(Naming.empty_tuple, py_object_type)
code.putln('}')
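# Rough C shape produced above for a '*args' parameter when extra
# positional arguments may remain (names simplified):
#
#   if (PyTuple_GET_SIZE(__pyx_args) > N) {
#     __pyx_v_args = PyTuple_GetSlice(__pyx_args, N, PyTuple_GET_SIZE(__pyx_args));
#     if (unlikely(!__pyx_v_args)) { /* cleanup */ return <error value>; }
#   } else {
#     __pyx_v_args = __pyx_empty_tuple;  /* plus an incref on the empty tuple */
#   }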
def generate_keyword_unpacking_code(self, min_positional_args, max_positional_args,
has_fixed_positional_count, positional_args,
kw_only_args, argtuple_error_label, code):
all_args = tuple(positional_args) + tuple(kw_only_args)
max_args = len(all_args)
code.putln("Py_ssize_t kw_args = PyDict_Size(%s);" %
Naming.kwds_cname)
# the 'values' array collects borrowed references to arguments
# before doing any type coercion etc.
code.putln("PyObject* values[%d] = {%s};" % (
max_args, ','.join('0'*max_args)))
# assign borrowed Python default values to the values array,
# so that they can be overwritten by received arguments below
for i, arg in enumerate(all_args):
if arg.default and arg.type.is_pyobject:
default_value = arg.calculate_default_value_code(code)
code.putln('values[%d] = %s;' % (i, arg.type.as_pyobject(default_value)))
# parse the args tuple and check that it's not too long
code.putln('switch (PyTuple_GET_SIZE(%s)) {' % Naming.args_cname)
if self.star_arg:
code.putln('default:')
for i in range(max_positional_args-1, -1, -1):
code.put('case %2d: ' % (i+1))
code.putln("values[%d] = PyTuple_GET_ITEM(%s, %d);" % (
i, Naming.args_cname, i))
code.putln('case 0: break;')
if not self.star_arg:
code.put('default: ') # more arguments than allowed
code.put_goto(argtuple_error_label)
code.putln('}')
# now fill up the positional/required arguments with values
# from the kw dict
if self.num_required_args or max_positional_args > 0:
last_required_arg = -1
for i, arg in enumerate(all_args):
if not arg.default:
last_required_arg = i
if last_required_arg < max_positional_args:
last_required_arg = max_positional_args-1
num_required_args = self.num_required_args
if max_positional_args > 0:
code.putln('switch (PyTuple_GET_SIZE(%s)) {' % Naming.args_cname)
for i, arg in enumerate(all_args[:last_required_arg+1]):
if max_positional_args > 0 and i <= max_positional_args:
if self.star_arg and i == max_positional_args:
code.putln('default:')
else:
code.putln('case %2d:' % i)
pystring_cname = code.intern_identifier(arg.name)
if arg.default:
if arg.kw_only:
# handled separately below
continue
code.putln('if (kw_args > 0) {')
code.putln('PyObject* value = PyDict_GetItem(%s, %s);' % (
Naming.kwds_cname, pystring_cname))
code.putln('if (value) { values[%d] = value; kw_args--; }' % i)
code.putln('}')
else:
num_required_args -= 1
code.putln('values[%d] = PyDict_GetItem(%s, %s);' % (
i, Naming.kwds_cname, pystring_cname))
code.putln('if (likely(values[%d])) kw_args--;' % i)
if i < min_positional_args:
if i == 0:
# special case: we know arg 0 is missing
code.put('else ')
code.put_goto(argtuple_error_label)
else:
# print the correct number of values (args or
# kwargs) that were passed into positional
# arguments up to this point
code.putln('else {')
code.globalstate.use_utility_code(raise_argtuple_invalid_utility_code)
code.put('__Pyx_RaiseArgtupleInvalid("%s", %d, %d, %d, %d); ' % (
self.name, has_fixed_positional_count,
min_positional_args, max_positional_args, i))
code.putln(code.error_goto(self.pos))
code.putln('}')
elif arg.kw_only:
code.putln('else {')
code.put('__Pyx_RaiseKeywordRequired("%s", %s); ' %(
self.name, pystring_cname))
code.putln(code.error_goto(self.pos))
code.putln('}')
if max_positional_args > 0:
code.putln('}')
if kw_only_args and not self.starstar_arg:
# unpack optional keyword-only arguments
# checking for interned strings in a dict is faster than iterating
# but it's too likely that we must iterate if we expect **kwargs
optional_args = []
for i, arg in enumerate(all_args[max_positional_args:]):
if not arg.kw_only or not arg.default:
continue
optional_args.append((i+max_positional_args, arg))
if optional_args:
# this mimics an unrolled loop so that we can "break" out of it
code.putln('while (kw_args > 0) {')
code.putln('PyObject* value;')
for i, arg in optional_args:
pystring_cname = code.intern_identifier(arg.name)
code.putln(
'value = PyDict_GetItem(%s, %s);' % (
Naming.kwds_cname, pystring_cname))
code.putln(
'if (value) { values[%d] = value; if (!(--kw_args)) break; }' % i)
code.putln('break;')
code.putln('}')
code.putln('if (unlikely(kw_args > 0)) {')
# non-positional/-required kw args left in dict: default args,
# kw-only args, **kwargs or error
#
# This is sort of a catch-all: except for checking required
# arguments, this will always do the right thing for unpacking
# keyword arguments, so that we can concentrate on optimising
# common cases above.
if max_positional_args == 0:
pos_arg_count = "0"
elif self.star_arg:
code.putln("const Py_ssize_t used_pos_args = (PyTuple_GET_SIZE(%s) < %d) ? PyTuple_GET_SIZE(%s) : %d;" % (
Naming.args_cname, max_positional_args,
Naming.args_cname, max_positional_args))
pos_arg_count = "used_pos_args"
else:
pos_arg_count = "PyTuple_GET_SIZE(%s)" % Naming.args_cname
code.globalstate.use_utility_code(parse_keywords_utility_code)
code.put(
'if (unlikely(__Pyx_ParseOptionalKeywords(%s, %s, %s, values, %s, "%s") < 0)) ' % (
Naming.kwds_cname,
Naming.pykwdlist_cname,
self.starstar_arg and self.starstar_arg.entry.cname or '0',
pos_arg_count,
self.name))
code.putln(code.error_goto(self.pos))
code.putln('}')
# convert arg values to their final type and assign them
for i, arg in enumerate(all_args):
if arg.default and not arg.type.is_pyobject:
code.putln("if (values[%d]) {" % i)
self.generate_arg_assignment(arg, "values[%d]" % i, code)
if arg.default and not arg.type.is_pyobject:
code.putln('} else {')
code.putln(
"%s = %s;" % (
arg.entry.cname,
arg.calculate_default_value_code(code)))
code.putln('}')
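# Hedged overview of the scheme implemented above: a local
# "PyObject* values[N]" array first receives borrowed default values,
# then positional arguments from the args tuple (via the switch on
# PyTuple_GET_SIZE), then any matching entries from the keyword dict;
# only at the very end are the collected values coerced to their declared
# types and assigned to the argument variables, which keeps the error
# paths simple.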
def generate_argument_conversion_code(self, code):
# Generate code to convert arguments from signature type to
# declared type, if needed. Also copies signature arguments
# into closure fields.
for arg in self.args:
if arg.needs_conversion:
self.generate_arg_conversion(arg, code)
elif arg.entry.in_closure:
if arg.type.is_pyobject:
code.put_incref(arg.hdr_cname, py_object_type)
code.putln('%s = %s;' % (arg.entry.cname, arg.hdr_cname))
def generate_arg_conversion(self, arg, code):
# Generate conversion code for one argument.
old_type = arg.hdr_type
new_type = arg.type
if old_type.is_pyobject:
if arg.default:
code.putln("if (%s) {" % arg.hdr_cname)
else:
code.putln("assert(%s); {" % arg.hdr_cname)
self.generate_arg_conversion_from_pyobject(arg, code)
code.putln("}")
elif new_type.is_pyobject:
self.generate_arg_conversion_to_pyobject(arg, code)
else:
if new_type.assignable_from(old_type):
code.putln(
"%s = %s;" % (arg.entry.cname, arg.hdr_cname))
else:
error(arg.pos,
"Cannot convert 1 argument from '%s' to '%s'" %
(old_type, new_type))
def generate_arg_conversion_from_pyobject(self, arg, code):
new_type = arg.type
func = new_type.from_py_function
# copied from CoerceFromPyTypeNode
if func:
lhs = arg.entry.cname
rhs = "%s(%s)" % (func, arg.hdr_cname)
if new_type.is_enum:
rhs = PyrexTypes.typecast(new_type, PyrexTypes.c_long_type, rhs)
code.putln("%s = %s; %s" % (
lhs,
rhs,
code.error_goto_if(new_type.error_condition(arg.entry.cname), arg.pos)))
else:
error(arg.pos,
"Cannot convert Python object argument to type '%s'"
% new_type)
def generate_arg_conversion_to_pyobject(self, arg, code):
old_type = arg.hdr_type
func = old_type.to_py_function
if func:
code.putln("%s = %s(%s); %s" % (
arg.entry.cname,
func,
arg.hdr_cname,
code.error_goto_if_null(arg.entry.cname, arg.pos)))
code.put_var_gotref(arg.entry)
else:
error(arg.pos,
"Cannot convert argument of type '%s' to Python object"
% old_type)
def generate_argument_type_tests(self, code):
# Generate type tests for args whose signature
# type is PyObject * and whose declared type is
# a subtype thereof.
for arg in self.args:
if arg.needs_type_test:
self.generate_arg_type_test(arg, code)
elif not arg.accept_none and arg.type.is_pyobject:
self.generate_arg_none_check(arg, code)
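# Example (sketch): "def f(MyExtType x)" gets a runtime subtype test
# because the method signature receives x as a plain PyObject*; an
# argument declared 'not None' that needs no subtype test still gets the
# None check emitted by generate_arg_none_check().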
def error_value(self):
return self.entry.signature.error_value
def caller_will_check_exceptions(self):
return 1
class GeneratorDefNode(DefNode):
# Generator DefNode.
#
# gbody GeneratorBodyDefNode
#
is_generator = True
needs_closure = True
child_attrs = DefNode.child_attrs + ["gbody"]
def __init__(self, **kwargs):
# XXX: doesn't actually need a body
kwargs['body'] = StatListNode(kwargs['pos'], stats=[])
super(GeneratorDefNode, self).__init__(**kwargs)
def analyse_declarations(self, env):
super(GeneratorDefNode, self).analyse_declarations(env)
self.gbody.local_scope = self.local_scope
self.gbody.analyse_declarations(env)
def generate_function_body(self, env, code):
body_cname = self.gbody.entry.func_cname
generator_cname = '%s->%s' % (Naming.cur_scope_cname, Naming.obj_base_cname)
code.putln('%s.resume_label = 0;' % generator_cname)
code.putln('%s.body = (__pyx_generator_body_t) %s;' % (generator_cname, body_cname))
code.put_giveref(Naming.cur_scope_cname)
code.put_finish_refcount_context()
code.putln("return (PyObject *) %s;" % Naming.cur_scope_cname);
def generate_function_definitions(self, env, code):
from ExprNodes import generator_utility_code
env.use_utility_code(generator_utility_code)
self.gbody.generate_function_header(code, proto=True)
super(GeneratorDefNode, self).generate_function_definitions(env, code)
self.gbody.generate_function_definitions(env, code)
class GeneratorBodyDefNode(DefNode):
# Generator body DefNode.
#
is_generator_body = True
def __init__(self, pos=None, name=None, body=None):
super(GeneratorBodyDefNode, self).__init__(pos=pos, body=body, name=name, doc=None,
args=[],
star_arg=None, starstar_arg=None)
def declare_generator_body(self, env):
prefix = env.next_id(env.scope_prefix)
name = env.next_id('generator')
entry = env.declare_var(prefix + name, py_object_type, self.pos, visibility='private')
entry.func_cname = Naming.genbody_prefix + prefix + name
entry.qualified_name = EncodedString(self.name)
self.entry = entry
def analyse_declarations(self, env):
self.analyse_argument_types(env)
self.declare_generator_body(env)
def generate_function_header(self, code, proto=False):
header = "static PyObject *%s(%s, PyObject *%s)" % (
self.entry.func_cname,
self.local_scope.scope_class.type.declaration_code(Naming.cur_scope_cname),
Naming.sent_value_cname)
if proto:
code.putln('%s; /* proto */' % header)
else:
code.putln('%s /* generator body */\n{' % header)
def generate_function_definitions(self, env, code):
lenv = self.local_scope
# Generate closure function definitions
self.body.generate_function_definitions(lenv, code)
# Generate C code for header and body of function
code.enter_cfunc_scope()
code.return_from_error_cleanup_label = code.new_label()
# ----- Top-level constants used by this function
code.mark_pos(self.pos)
self.generate_cached_builtins_decls(lenv, code)
# ----- Function header
code.putln("")
self.generate_function_header(code)
# ----- Local variables
code.putln("PyObject *%s = NULL;" % Naming.retval_cname)
tempvardecl_code = code.insertion_point()
code.put_declare_refcount_context()
code.put_setup_refcount_context(self.entry.name)
# ----- Resume switch point.
code.funcstate.init_closure_temps(lenv.scope_class.type.scope)
resume_code = code.insertion_point()
first_run_label = code.new_label('first_run')
code.use_label(first_run_label)
code.put_label(first_run_label)
code.putln('%s' %
(code.error_goto_if_null(Naming.sent_value_cname, self.pos)))
# ----- Function body
self.generate_function_body(env, code)
code.putln('PyErr_SetNone(PyExc_StopIteration); %s' % code.error_goto(self.pos))
# ----- Error cleanup
if code.error_label in code.labels_used:
code.put_goto(code.return_label)
code.put_label(code.error_label)
for cname, type in code.funcstate.all_managed_temps():
code.put_xdecref(cname, type)
code.put_add_traceback(self.entry.qualified_name)
# ----- Non-error return cleanup
code.put_label(code.return_label)
code.put_xdecref(Naming.retval_cname, py_object_type)
code.putln('%s->%s.resume_label = -1;' % (Naming.cur_scope_cname, Naming.obj_base_cname))
code.put_finish_refcount_context()
code.putln('return NULL;')
code.putln("}")
# ----- Go back and insert temp variable declarations
tempvardecl_code.put_temp_declarations(code.funcstate)
# ----- Generator resume code
resume_code.putln("switch (%s->%s.resume_label) {" % (Naming.cur_scope_cname, Naming.obj_base_cname));
resume_code.putln("case 0: goto %s;" % first_run_label)
from ParseTreeTransforms import YieldNodeCollector
collector = YieldNodeCollector()
collector.visitchildren(self)
for yield_expr in collector.yields:
resume_code.putln("case %d: goto %s;" % (yield_expr.label_num, yield_expr.label_name));
resume_code.putln("default: /* CPython raises the right error here */");
resume_code.put_finish_refcount_context()
resume_code.putln("return NULL;");
resume_code.putln("}");
code.exit_cfunc_scope()
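# Rough shape of the resume dispatch inserted above through 'resume_code'
# (labels and struct member names simplified):
#
#   switch (__pyx_cur_scope->__pyx_base.resume_label) {
#     case 0: goto __pyx_L4_first_run;
#     case 1: goto __pyx_L6_resume_from_yield;  /* one case per yield */
#     default: /* CPython raises the right error here */
#       ...
#       return NULL;
#   }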
class OverrideCheckNode(StatNode):
# A Node for dispatching to the def method if it
# is overridden.
#
# py_func
#
# args
# func_node
# body
child_attrs = ['body']
body = None
def analyse_expressions(self, env):
self.args = env.arg_entries
if self.py_func.is_module_scope:
first_arg = 0
else:
first_arg = 1
import ExprNodes
self.func_node = ExprNodes.RawCNameExprNode(self.pos, py_object_type)
call_tuple = ExprNodes.TupleNode(self.pos, args=[ExprNodes.NameNode(self.pos, name=arg.name) for arg in self.args[first_arg:]])
call_node = ExprNodes.SimpleCallNode(self.pos,
function=self.func_node,
args=[ExprNodes.NameNode(self.pos, name=arg.name) for arg in self.args[first_arg:]])
self.body = ReturnStatNode(self.pos, value=call_node)
self.body.analyse_expressions(env)
def generate_execution_code(self, code):
interned_attr_cname = code.intern_identifier(self.py_func.entry.name)
# Check to see if we are an extension type
if self.py_func.is_module_scope:
self_arg = "((PyObject *)%s)" % Naming.module_cname
else:
self_arg = "((PyObject *)%s)" % self.args[0].cname
code.putln("/* Check if called by wrapper */")
code.putln("if (unlikely(%s)) ;" % Naming.skip_dispatch_cname)
code.putln("/* Check if overriden in Python */")
if self.py_func.is_module_scope:
code.putln("else {")
else:
code.putln("else if (unlikely(Py_TYPE(%s)->tp_dictoffset != 0)) {" % self_arg)
func_node_temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
self.func_node.set_cname(func_node_temp)
# need to get the attribute manually; the scope lookup would return the cdef method
err = code.error_goto_if_null(func_node_temp, self.pos)
code.putln("%s = PyObject_GetAttr(%s, %s); %s" % (
func_node_temp, self_arg, interned_attr_cname, err))
code.put_gotref(func_node_temp)
is_builtin_function_or_method = "PyCFunction_Check(%s)" % func_node_temp
is_overridden = "(PyCFunction_GET_FUNCTION(%s) != (void *)&%s)" % (
func_node_temp, self.py_func.entry.func_cname)
code.putln("if (!%s || %s) {" % (is_builtin_function_or_method, is_overridden))
self.body.generate_execution_code(code)
code.putln("}")
code.put_decref_clear(func_node_temp, PyrexTypes.py_object_type)
code.funcstate.release_temp(func_node_temp)
code.putln("}")
class ClassDefNode(StatNode, BlockNode):
pass
class PyClassDefNode(ClassDefNode):
# A Python class definition.
#
# name EncodedString Name of the class
# doc string or None
# body StatNode Attribute definition code
# entry Symtab.Entry
# scope PyClassScope
# decorators [DecoratorNode] list of decorators or None
#
# The following subnodes are constructed internally:
#
# dict DictNode Class dictionary or Py3 namespace
# classobj ClassNode Class object
# target NameNode Variable to assign class object to
child_attrs = ["body", "dict", "metaclass", "mkw", "bases", "classobj", "target"]
decorators = None
py3_style_class = False # Python3 style class (bases+kwargs)
def __init__(self, pos, name, bases, doc, body, decorators = None,
keyword_args = None, starstar_arg = None):
StatNode.__init__(self, pos)
self.name = name
self.doc = doc
self.body = body
self.decorators = decorators
import ExprNodes
if self.doc and Options.docstrings:
doc = embed_position(self.pos, self.doc)
doc_node = ExprNodes.StringNode(pos, value = doc)
else:
doc_node = None
if keyword_args or starstar_arg:
self.py3_style_class = True
self.bases = bases
self.metaclass = None
if keyword_args and not starstar_arg:
for i, item in list(enumerate(keyword_args.key_value_pairs))[::-1]:
if item.key.value == 'metaclass':
if self.metaclass is not None:
error(item.pos, "keyword argument 'metaclass' passed multiple times")
# special case: we already know the metaclass,
# so we don't need to do the "build kwargs,
# find metaclass" dance at runtime
self.metaclass = item.value
del keyword_args.key_value_pairs[i]
if starstar_arg or (keyword_args and keyword_args.key_value_pairs):
self.mkw = ExprNodes.KeywordArgsNode(
pos, keyword_args = keyword_args, starstar_arg = starstar_arg)
else:
self.mkw = ExprNodes.NullNode(pos)
if self.metaclass is None:
self.metaclass = ExprNodes.PyClassMetaclassNode(
pos, mkw = self.mkw, bases = self.bases)
self.dict = ExprNodes.PyClassNamespaceNode(pos, name = name,
doc = doc_node, metaclass = self.metaclass, bases = self.bases,
mkw = self.mkw)
self.classobj = ExprNodes.Py3ClassNode(pos, name = name,
bases = self.bases, dict = self.dict, doc = doc_node,
metaclass = self.metaclass, mkw = self.mkw)
else:
self.dict = ExprNodes.DictNode(pos, key_value_pairs = [])
self.metaclass = None
self.mkw = None
self.bases = None
self.classobj = ExprNodes.ClassNode(pos, name = name,
bases = bases, dict = self.dict, doc = doc_node)
self.target = ExprNodes.NameNode(pos, name = name)
def as_cclass(self):
"""
Return this node as if it were declared as an extension class
"""
if self.py3_style_class:
error(self.classobj.pos, "Python3 style class could not be represented as C class")
return
bases = self.classobj.bases.args
if len(bases) == 0:
base_class_name = None
base_class_module = None
elif len(bases) == 1:
base = bases[0]
path = []
from ExprNodes import AttributeNode, NameNode
while isinstance(base, AttributeNode):
path.insert(0, base.attribute)
base = base.obj
if isinstance(base, NameNode):
path.insert(0, base.name)
base_class_name = path[-1]
if len(path) > 1:
base_class_module = u'.'.join(path[:-1])
else:
base_class_module = None
else:
error(self.classobj.bases.args.pos, "Invalid base class")
else:
error(self.classobj.bases.args.pos, "C class may only have one base class")
return None
return CClassDefNode(self.pos,
visibility = 'private',
module_name = None,
class_name = self.name,
base_class_module = base_class_module,
base_class_name = base_class_name,
decorators = self.decorators,
body = self.body,
in_pxd = False,
doc = self.doc)
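# Hedged example: "class Point(pkg.Base): ..." converts to a
# CClassDefNode with base_class_module == u'pkg' and
# base_class_name == 'Base'; multiple bases and Python3-style class
# arguments cannot be expressed as an extension type and are rejected
# above.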
def create_scope(self, env):
genv = env
while genv.is_py_class_scope or genv.is_c_class_scope:
genv = genv.outer_scope
cenv = self.scope = PyClassScope(name = self.name, outer_scope = genv)
return cenv
def analyse_declarations(self, env):
self.target.analyse_target_declaration(env)
cenv = self.create_scope(env)
cenv.directives = env.directives
cenv.class_obj_cname = self.target.entry.cname
self.body.analyse_declarations(cenv)
def analyse_expressions(self, env):
if self.py3_style_class:
self.bases.analyse_expressions(env)
self.metaclass.analyse_expressions(env)
self.mkw.analyse_expressions(env)
self.dict.analyse_expressions(env)
self.classobj.analyse_expressions(env)
genv = env.global_scope()
cenv = self.scope
self.body.analyse_expressions(cenv)
self.target.analyse_target_expression(env, self.classobj)
def generate_function_definitions(self, env, code):
self.generate_lambda_definitions(self.scope, code)
self.body.generate_function_definitions(self.scope, code)
def generate_execution_code(self, code):
code.pyclass_stack.append(self)
cenv = self.scope
if self.py3_style_class:
self.bases.generate_evaluation_code(code)
self.mkw.generate_evaluation_code(code)
self.metaclass.generate_evaluation_code(code)
self.dict.generate_evaluation_code(code)
cenv.namespace_cname = cenv.class_obj_cname = self.dict.result()
self.body.generate_execution_code(code)
self.classobj.generate_evaluation_code(code)
cenv.namespace_cname = cenv.class_obj_cname = self.classobj.result()
self.target.generate_assignment_code(self.classobj, code)
self.dict.generate_disposal_code(code)
self.dict.free_temps(code)
if self.py3_style_class:
self.mkw.generate_disposal_code(code)
self.mkw.free_temps(code)
self.metaclass.generate_disposal_code(code)
self.metaclass.free_temps(code)
self.bases.generate_disposal_code(code)
self.bases.free_temps(code)
code.pyclass_stack.pop()
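# Hedged sketch of the runtime sequence generated above for a class
# statement: evaluate bases/metaclass/keyword dict (Py3-style classes
# only), create the class namespace dict, execute the class body into it,
# build the class object, assign it to the target name, then dispose of
# the temporaries in reverse order of creation.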
class CClassDefNode(ClassDefNode):
# An extension type definition.
#
# visibility 'private' or 'public' or 'extern'
# typedef_flag boolean
# api boolean
# module_name string or None For import of extern type objects
# class_name string Unqualified name of class
# as_name string or None Name to declare as in this scope
# base_class_module string or None Module containing the base class
# base_class_name string or None Name of the base class
# objstruct_name string or None Specified C name of object struct
# typeobj_name string or None Specified C name of type object
# in_pxd boolean Is in a .pxd file
# decorators [DecoratorNode] list of decorators or None
# doc string or None
# body StatNode or None
# entry Symtab.Entry
# base_type PyExtensionType or None
# buffer_defaults_node DictNode or None Declares defaults for a buffer
# buffer_defaults_pos
child_attrs = ["body"]
buffer_defaults_node = None
buffer_defaults_pos = None
typedef_flag = False
api = False
objstruct_name = None
typeobj_name = None
decorators = None
shadow = False
def buffer_defaults(self, env):
if not hasattr(self, '_buffer_defaults'):
import Buffer
if self.buffer_defaults_node:
self._buffer_defaults = Buffer.analyse_buffer_options(
self.buffer_defaults_pos,
env, [], self.buffer_defaults_node,
need_complete=False)
else:
self._buffer_defaults = None
return self._buffer_defaults
def declare(self, env):
if self.module_name and self.visibility != 'extern':
module_path = self.module_name.split(".")
home_scope = env.find_imported_module(module_path, self.pos)
if not home_scope:
return None
else:
home_scope = env
self.entry = home_scope.declare_c_class(
name = self.class_name,
pos = self.pos,
defining = 0,
implementing = 0,
module_name = self.module_name,
base_type = None,
objstruct_cname = self.objstruct_name,
typeobj_cname = self.typeobj_name,
visibility = self.visibility,
typedef_flag = self.typedef_flag,
api = self.api,
buffer_defaults = self.buffer_defaults(env),
shadow = self.shadow)
def analyse_declarations(self, env):
#print "CClassDefNode.analyse_declarations:", self.class_name
#print "...visibility =", self.visibility
#print "...module_name =", self.module_name
if env.in_cinclude and not self.objstruct_name:
error(self.pos, "Object struct name specification required for "
"C class defined in 'extern from' block")
self.base_type = None
# Now that module imports are cached, we need to
# import the modules for extern classes.
if self.module_name:
self.module = None
for module in env.cimported_modules: