Commit 4f208e13 authored Nov 13, 2010 by Mark Florisson
branch merge

Parents: 841e7703, d591804e
Showing 46 changed files with 1637 additions and 694 deletions (+1637 -694)
Cython/Build/Dependencies.py  +30  -14
Cython/Build/Inline.py  +83  -18
Cython/Compiler/Builtin.py  +252  -155
Cython/Compiler/Code.py  +11  -5
Cython/Compiler/ExprNodes.py  +283  -68
Cython/Compiler/Main.py  +2  -0
Cython/Compiler/Nodes.py  +36  -120
Cython/Compiler/Optimize.py  +77  -151
Cython/Compiler/ParseTreeTransforms.py  +60  -34
Cython/Compiler/Parsing.pxd  +2  -0
Cython/Compiler/Parsing.py  +46  -25
Cython/Compiler/PyrexTypes.py  +1  -9
Cython/Compiler/Scanning.py  +10  -3
Cython/Compiler/Symtab.py  +43  -12
Cython/Compiler/TypeSlots.py  +16  -4
Cython/Compiler/UtilNodes.py  +3  -0
Cython/Compiler/Visitor.pxd  +17  -4
Cython/Compiler/Visitor.py  +11  -8
Cython/Plex/Scanners.pxd  +5  -6
Cython/Plex/Scanners.py  +9  -6
Cython/Shadow.py  +4  -0
Cython/TestUtils.py  +9  -1
runtests.py  +17  -10
setup.py  +18  -4
tests/bugs.txt  +4  -0
tests/build/basic_cythonize.srctree  +1  -1
tests/build/inline_distutils.srctree  +1  -1
tests/compile/builtinfuncs.pyx  +1  -0
tests/errors/cdef_syntax.pyx  +1  -1
tests/errors/cpdef_syntax.pyx  +1  -1
tests/errors/e2_packedstruct_T290.pyx  +1  -1
tests/run/bound_builtin_methods_T589.pyx  +35  -0
tests/run/builtin_next.pyx  +92  -0
tests/run/builtin_pow.pyx  +41  -0
tests/run/consts.pyx  +2  -2
tests/run/cython3.pyx  +51  -0
tests/run/inplace.pyx  +132  -5
tests/run/int_literals.pyx  +56  -13
tests/run/list.pyx  +15  -0
tests/run/metaclass.pyx  +30  -5
tests/run/pure_mode_cmethod_inheritance_T583.pxd  +19  -0
tests/run/pure_mode_cmethod_inheritance_T583.py  +74  -0
tests/run/set.pyx  +9  -0
tests/run/type_inference.pyx  +1  -2
tests/run/unicode_indexing.pyx  +0  -1
tests/run/unicodemethods.pyx  +25  -4
Cython/Compiler/Dependencies.py → Cython/Build/Dependencies.py
...
@@ -94,7 +94,7 @@ class DistutilsInfo(object):
                    value = [tuple(macro.split('=')) for macro in value]
                self.values[key] = value
        elif exn is not None:
            for key in self.distutils_settings:
            for key in distutils_settings:
                if key in ('name', 'sources'):
                    pass
                value = getattr(exn, key, None)
...
@@ -154,19 +154,33 @@ def strip_string_literals(code, prefix='__Pyx_L'):
    in_quote = False
    raw = False
    while True:
        hash_mark = code.find('#', q)
        single_q = code.find("'", q)
        double_q = code.find('"', q)
        q = min(single_q, double_q)
        if q == -1: q = max(single_q, double_q)
        if q == -1:
            if in_quote:
                counter += 1
                label = "'%s%s" % (prefix, counter)
                literals[label] = code[start:]
                new_code.append(label)
            else:
                new_code.append(code[start:])
        # Process comment.
        if -1 < hash_mark and (hash_mark < q or q == -1):
            end = code.find('\n', hash_mark)
            if end == -1:
                end = None
            new_code.append(code[start:hash_mark+1])
            counter += 1
            label = "%s%s" % (prefix, counter)
            literals[label] = code[hash_mark+1:end]
            new_code.append(label)
            if end is None:
                break
            q = end
            start = q
        # We're done.
        elif q == -1:
            new_code.append(code[start:])
            break
        # Try to close the quote.
        elif in_quote:
            if code[q-1] == '\\':
                k = 2
...
@@ -179,12 +193,14 @@ def strip_string_literals(code, prefix='__Pyx_L'):
                counter += 1
                label = "%s%s" % (prefix, counter)
                literals[label] = code[start+len(in_quote):q]
                new_code.append("'%s'" % label)
                new_code.append("%s%s%s" % (in_quote, label, in_quote))
                q += len(in_quote)
                start = q
                in_quote = False
                start = q
            else:
                q += 1
        # Open the quote.
        else:
            raw = False
            if len(code) >= q+3 and (code[q+1] == code[q] == code[q+2]):
...
@@ -202,13 +218,13 @@ def strip_string_literals(code, prefix='__Pyx_L'):
    return "".join(new_code), literals

def parse_dependencies(source_filename):
    # Actual parsing is way to slow, so we use regular expressions.
    # The only catch is that we must strip comments and string
    # literals ahead of time.
    source = Utils.open_source_file(source_filename, "rU").read()
    distutils_info = DistutilsInfo(source)
    source = re.sub('#.*', '', source)
    source, literals = strip_string_literals(source)
    source = source.replace('\\\n', ' ')
    if '\t' in source:
...
@@ -389,8 +405,8 @@ def create_extension_list(patterns, ctx=None, aliases=None):
                continue
            template = pattern
            name = template.name
            base = DistutilsInfo(template)
            exn_type = type(template)
            base = DistutilsInfo(exn=template)
            exn_type = template.__class__
        else:
            raise TypeError(pattern)
        for file in glob(filepattern):
...
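For context, the strip_string_literals() changes above extend the helper so that `#` comments are also replaced by numbered labels, not just string literals, before the regex-based dependency scan runs. A minimal sketch of the idea follows; it is not the actual Cython implementation, and the label format and helper name are illustrative only.

import re

def strip_literals_sketch(code, prefix='__Pyx_L'):
    # Replace string literals and comments with numbered labels so that
    # later regex-based dependency scanning cannot be fooled by their contents.
    literals = {}
    counter = [0]

    def repl(match):
        counter[0] += 1
        label = "%s%d" % (prefix, counter[0])
        literals[label] = match.group(0)
        return label

    # crude pattern: single- or double-quoted strings, then line comments
    pattern = r"'[^'\n]*'|\"[^\"\n]*\"|#[^\n]*"
    return re.sub(pattern, repl, code), literals

stripped, mapping = strip_literals_sketch("import foo  # distutils: language = c++\nx = 'a#b'")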
Cython/Build/Inline.py

#no doctest
print "Warning: Using prototype cython.inline code..."
import tempfile
...
@@ -8,14 +9,15 @@ try:
except ImportError:
    import md5 as hashlib
from distutils.dist import Distribution
from Cython.Distutils.extension import Extension
from Cython.Distutils import build_ext
from distutils.core import Distribution, Extension
from distutils.command.build_ext import build_ext
import Cython
from Cython.Compiler.Main import Context, CompilationOptions, default_options
from Cython.Compiler.ParseTreeTransforms import CythonTransform, SkipDeclarations, AnalyseDeclarationsTransform
from Cython.Compiler.TreeFragment import parse_from_strings
from Cython.Build.Dependencies import strip_string_literals, cythonize

_code_cache = {}
...
@@ -81,6 +83,7 @@ def cython_inline(code,
                  locals=None,
                  globals=None,
                  **kwds):
    code, literals = strip_string_literals(code)
    code = strip_common_indent(code)
    ctx = Context(include_dirs, default_options)
    if locals is None:
...
@@ -103,42 +106,54 @@ def cython_inline(code,
    arg_names = kwds.keys()
    arg_names.sort()
    arg_sigs = tuple([(get_type(kwds[arg], ctx), arg) for arg in arg_names])
    key = code, arg_sigs
    module = _code_cache.get(key)
    if not module:
    key = code, arg_sigs, sys.version_info, sys.executable, Cython.__version__
    module_name = "_cython_inline_" + hashlib.md5(str(key)).hexdigest()
#    # TODO: Does this cover all the platforms?
#    if (not os.path.exists(os.path.join(lib_dir, module_name + ".so")) and
#            not os.path.exists(os.path.join(lib_dir, module_name + ".dll"))):
    try:
        if not os.path.exists(lib_dir):
            os.makedirs(lib_dir)
        if lib_dir not in sys.path:
            sys.path.append(lib_dir)
        __import__(module_name)
    except ImportError:
        c_include_dirs = []
        cimports = []
        qualified = re.compile(r'([.\w]+)[.]')
        for type, _ in arg_sigs:
            m = qualified.match(type)
            if m:
                cimports.append('\ncimport %s' % m.groups()[0])
                # one special case
                if m.groups()[0] == 'numpy':
                    import numpy
                    c_include_dirs.append(numpy.get_include())
        module_body, func_body = extract_func_code(code)
        params = ', '.join(['%s %s' % a for a in arg_sigs])
        module_code = """
%(cimports)s
%(module_body)s
%(cimports)s
def __invoke(%(params)s):
%(func_body)s
""" % {'cimports': '\n'.join(cimports), 'module_body': module_body, 'params': params, 'func_body': func_body }
#        print module_code
        _, pyx_file = tempfile.mkstemp('.pyx')
        for key, value in literals.items():
            module_code = module_code.replace(key, value)
        pyx_file = os.path.join(lib_dir, module_name + '.pyx')
        open(pyx_file, 'w').write(module_code)
        module = "_" + hashlib.md5(code + str(arg_sigs)).hexdigest()
        extension = Extension(
            name = module,
            name = module_name,
            sources = [pyx_file],
            pyrex_include_dirs = include_dirs)
            include_dirs = c_include_dirs)
        build_extension = build_ext(Distribution())
        build_extension.finalize_options()
        build_extension.extensions = [extension]
        build_extension.extensions = cythonize([extension])
        build_extension.build_temp = os.path.dirname(pyx_file)
        if lib_dir not in sys.path:
            sys.path.append(lib_dir)
        build_extension.build_lib = lib_dir
        build_extension.run()
        _code_cache[key] = module
        _code_cache[key] = module_name
    arg_list = [kwds[arg] for arg in arg_names]
    return __import__(module).__invoke(*arg_list)
    return __import__(module_name).__invoke(*arg_list)

non_space = re.compile('[^ ]')
def strip_common_indent(code):
...
@@ -165,7 +180,6 @@ module_statement = re.compile(r'^((cdef +(extern|class))|cimport|(from .+ cimpor
def extract_func_code(code):
    module = []
    function = []
    # TODO: string literals, backslash
    current = function
    code = code.replace('\t', ' ')
    lines = code.split('\n')
...
@@ -177,3 +191,54 @@ def extract_func_code(code):
            current = function
        current.append(line)
    return '\n'.join(module), '    ' + '\n'.join(function)

try:
    from inspect import getcallargs
except ImportError:
    def getcallargs(func, *arg_values, **kwd_values):
        all = {}
        args, varargs, kwds, defaults = inspect.getargspec(func)
        if varargs is not None:
            all[varargs] = arg_values[len(args):]
        for name, value in zip(args, arg_values):
            all[name] = value
        for name, value in kwd_values.items():
            if name in args:
                if name in all:
                    raise TypeError, "Duplicate argument %s" % name
                all[name] = kwd_values.pop(name)
        if kwds is not None:
            all[kwds] = kwd_values
        elif kwd_values:
            raise TypeError, "Unexpected keyword arguments: %s" % kwd_values.keys()
        if defaults is None:
            defaults = ()
        first_default = len(args) - len(defaults)
        for ix, name in enumerate(args):
            if name not in all:
                if ix >= first_default:
                    all[name] = defaults[ix - first_default]
                else:
                    raise TypeError, "Missing argument: %s" % name
        return all

def get_body(source):
    ix = source.index(':')
    if source[:5] == 'lambda':
        return "return %s" % source[ix+1:]
    else:
        return source[ix+1:]

# Lots to be done here... It would be especially cool if compiled functions
# could invoke each other quickly.
class RuntimeCompiledFunction(object):

    def __init__(self, f):
        self._f = f
        self._body = get_body(inspect.getsource(f))

    def __call__(self, *args, **kwds):
        all = getcallargs(self._f, *args, **kwds)
        return cython_inline(self._body, locals=self._f.func_globals, globals=self._f.func_globals, **all)
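As a usage illustration of the code above: cython.inline() compiles the snippet on first use and caches the built extension module, keyed on the code string plus the inferred argument types (and, after this change, the interpreter and Cython version). A hedged sketch follows; actual behaviour depends on the Cython version in use and on a working C compiler being available.

import cython

def fib(n):
    # The body is compiled to C the first time this code/argument-type
    # combination is seen; later calls reuse the cached module.
    return cython.inline("""
    cdef int i
    cdef long a = 0, b = 1
    for i in range(n):
        a, b = b, a + b
    return a
    """, n=n)

print(fib(10))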
Cython/Compiler/Builtin.py
...
@@ -8,154 +8,60 @@ from TypeSlots import Signature
import PyrexTypes
import Naming

builtin_function_table = [
    # name,        args,   return,  C API func,           py equiv = "*"
    ('abs',        "O",    "O",     "PyNumber_Absolute"),
    #('chr',       "",     "",      ""),
    #('cmp', "",   "",     "",      ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result)
    #('compile',   "",     "",      ""), # PyObject* Py_CompileString( char *str, char *filename, int start)
    ('delattr',    "OO",   "r",     "PyObject_DelAttr"),
    ('dir',        "O",    "O",     "PyObject_Dir"),
    ('divmod',     "OO",   "O",     "PyNumber_Divmod"),
    ('exec',       "OOO",  "O",     "__Pyx_PyRun"),
    #('eval',      "",     "",      ""),
    #('execfile',  "",     "",      ""),
    #('filter',    "",     "",      ""),
    #('getattr', "OO", "O", "PyObject_GetAttr"), # optimised later on
    ('getattr3',   "OOO",  "O",     "__Pyx_GetAttr3",     "getattr"),
    ('hasattr',    "OO",   "b",     "PyObject_HasAttr"),
    ('hash',       "O",    "l",     "PyObject_Hash"),
    #('hex',       "",     "",      ""),
    #('id',        "",     "",      ""),
    #('input',     "",     "",      ""),
    ('intern',     "O",    "O",     "__Pyx_Intern"),
    ('isinstance', "OO",   "b",     "PyObject_IsInstance"),
    ('issubclass', "OO",   "b",     "PyObject_IsSubclass"),
    #('iter', "O", "O", "PyObject_GetIter"), # optimised later on
    ('len',        "O",    "z",     "PyObject_Length"),
    ('locals',     "",     "O",     "__pyx_locals"),
    #('map',       "",     "",      ""),
    #('max',       "",     "",      ""),
    #('min',       "",     "",      ""),
    #('oct',       "",     "",      ""),
    # Not worth doing open, when second argument would become mandatory
    #('open',      "ss",   "O",     "PyFile_FromString"),
    #('ord',       "",     "",      ""),
    ('pow',        "OOO",  "O",     "PyNumber_Power"),
    #('range',     "",     "",      ""),
    #('raw_input', "",     "",      ""),
    #('reduce',    "",     "",      ""),
    ('reload',     "O",    "O",     "PyImport_ReloadModule"),
    ('repr',       "O",    "O",     "PyObject_Repr"),
    #('round',     "",     "",      ""),
    ('setattr',    "OOO",  "r",     "PyObject_SetAttr"),
    #('sum',       "",     "",      ""),
    #('type',      "O",    "O",     "PyObject_Type"),
    #('unichr',    "",     "",      ""),
    #('unicode',   "",     "",      ""),
    #('vars',      "",     "",      ""),
    #('zip',       "",     "",      ""),
    # Can't do these easily until we have builtin type entries.
    #('typecheck', "OO",   "i",     "PyObject_TypeCheck", False),
    #('issubtype', "OO",   "i",     "PyType_IsSubtype",   False),
    # Put in namespace append optimization.
    ('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"),
]
# Builtin types
# bool
# buffer
# classmethod
# dict
# enumerate
# file
# float
# int
# list
# long
# object
# property
# slice
# staticmethod
# super
# str
# tuple
# type
# xrange
builtin_types_table = [
    ("type",    "PyType_Type",     []),
    # This conflicts with the C++ bool type, and unfortunately
    # C++ is too liberal about PyObject* <-> bool conversions,
    # resulting in unintuitive runtime behavior and segfaults.
    # ("bool",  "PyBool_Type",     []),
    ("int",     "PyInt_Type",      []),
    ("long",    "PyLong_Type",     []),
    ("float",   "PyFloat_Type",    []),
    # Until we have a way to access attributes of a type,
    # we don't want to make this one builtin.
    # ("complex", "PyComplex_Type", []),
    ("bytes",   "PyBytes_Type",    []),
    ("str",     "PyString_Type",   []),
    ("unicode", "PyUnicode_Type",  []),
    ("tuple",   "PyTuple_Type",    []),
    ("list",    "PyList_Type",     [("insert", "OzO", "i", "PyList_Insert")]),
    ("dict",    "PyDict_Type",     [("items",  "O", "O", "PyDict_Items"),
                                    ("keys",   "O", "O", "PyDict_Keys"),
                                    ("values", "O", "O", "PyDict_Values"),
                                    ("copy",   "O", "O", "PyDict_Copy")]),
    ("slice",   "PySlice_Type",    []),
    # ("file",  "PyFile_Type",     []), # not in Py3
    # C-level implementations of builtin types, functions and methods
    ("set",       "PySet_Type",    [("clear",   "O",  "i", "PySet_Clear"),
                                    ("discard", "OO", "i", "PySet_Discard"),
                                    ("add",     "OO", "i", "PySet_Add"),
                                    ("pop",     "O",  "O", "PySet_Pop")]),
    ("frozenset", "PyFrozenSet_Type", []),
]

pow2_utility_code = UtilityCode(
proto = """
#define __Pyx_PyNumber_Power2(a, b) PyNumber_Power(a, b, Py_None)
""")

types_that_construct_their_instance = (
    # some builtin types do not always return an instance of
    # themselves - these do:
    'type', 'bool', 'long', 'float', 'bytes', 'unicode', 'tuple', 'list', 'dict', 'set', 'frozenset'
    # 'str',             # only in Py3.x
    # 'file',            # only in Py2.x
)

include_string_h_utility_code = UtilityCode(
proto = """
#include <string.h>
""")

builtin_structs_table = [
    ('Py_buffer', 'Py_buffer',
     [("buf",        PyrexTypes.c_void_ptr_type),
      ("obj",        PyrexTypes.py_object_type),
      ("len",        PyrexTypes.c_py_ssize_t_type),
      ("itemsize",   PyrexTypes.c_py_ssize_t_type),
      ("readonly",   PyrexTypes.c_bint_type),
      ("ndim",       PyrexTypes.c_int_type),
      ("format",     PyrexTypes.c_char_ptr_type),
      ("shape",      PyrexTypes.c_py_ssize_t_ptr_type),
      ("strides",    PyrexTypes.c_py_ssize_t_ptr_type),
      ("suboffsets", PyrexTypes.c_py_ssize_t_ptr_type),
      ("internal",   PyrexTypes.c_void_ptr_type),
      ])
]

iter_next_utility_code = UtilityCode(
proto = """
#define __Pyx_PyIter_Next(obj) __Pyx_PyIter_Next2(obj, NULL);
static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject *, PyObject *); /*proto*/
"""
,
# copied from Py3's builtin_next()
impl
=
'''
static CYTHON_INLINE PyObject *__Pyx_PyIter_Next2(PyObject* iterator, PyObject* defval) {
PyObject* next;
if (unlikely(!PyIter_Check(iterator))) {
PyErr_Format(PyExc_TypeError,
"%.200s object is not an iterator", iterator->ob_type->tp_name);
return NULL;
}
next = (*(Py_TYPE(iterator)->tp_iternext))(iterator);
if (likely(next)) {
return next;
} else if (defval) {
if (PyErr_Occurred()) {
if(!PyErr_ExceptionMatches(PyExc_StopIteration))
return NULL;
PyErr_Clear();
}
Py_INCREF(defval);
return defval;
} else if (PyErr_Occurred()) {
return NULL;
} else {
PyErr_SetNone(PyExc_StopIteration);
return NULL;
}
}
''')

getattr3_utility_code = UtilityCode(
proto = """
static PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /*proto*/
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /*proto*/
""",
impl = """
static PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
PyObject *r = PyObject_GetAttr(o, n);
if (!r) {
if (!PyErr_ExceptionMatches(PyExc_AttributeError))
...
...
@@ -166,7 +72,7 @@ static PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) {
}
return r;
bad:
    return 0;
    return NULL;
}
""")
...
...
@@ -181,8 +87,13 @@ proto = """
#endif
#endif
static PyObject* __Pyx_PyRun(PyObject*, PyObject*, PyObject*);
static CYTHON_INLINE PyObject* __Pyx_PyRun2(PyObject*, PyObject*);
"""
,
impl
=
"""
static CYTHON_INLINE PyObject* __Pyx_PyRun2(PyObject* o, PyObject* globals) {
return __Pyx_PyRun(o, globals, NULL);
}
static PyObject* __Pyx_PyRun(PyObject* o, PyObject* globals, PyObject* locals) {
PyObject* result;
PyObject* s = 0;
...
...
@@ -374,36 +285,222 @@ Py_XDECREF(__Pyx_PyFrozenSet_Type); __Pyx_PyFrozenSet_Type = NULL;
"""
)

builtin_utility_code = {
    'exec'      : pyexec_utility_code,
    'getattr3'  : getattr3_utility_code,
    'intern'    : intern_utility_code,
    'set'       : py23_set_utility_code,
    'frozenset' : py23_set_utility_code,
}

builtin_scope = BuiltinScope()

def declare_builtin_func(name, args, ret, cname, py_equiv="*"):
    sig = Signature(args, ret)
    type = sig.function_type()
    utility = builtin_utility_code.get(name)
    builtin_scope.declare_builtin_cfunction(name, type, cname, py_equiv, utility)

# mapping from builtins to their C-level equivalents

class _BuiltinOverride(object):
    def __init__(self, py_name, args, ret_type, cname, py_equiv="*",
                 utility_code=None, sig=None, func_type=None):
        self.py_name, self.cname, self.py_equiv = py_name, cname, py_equiv
        self.args, self.ret_type = args, ret_type
        self.func_type, self.sig = func_type, sig
        self.utility_code = utility_code

class BuiltinFunction(_BuiltinOverride):
    def declare_in_scope(self, scope):
        func_type, sig = self.func_type, self.sig
        if func_type is None:
            if sig is None:
                sig = Signature(self.args, self.ret_type)
            func_type = sig.function_type()
        scope.declare_builtin_cfunction(self.py_name, func_type, self.cname,
                                        self.py_equiv, self.utility_code)

class BuiltinMethod(_BuiltinOverride):
    def declare_in_type(self, self_type):
        method_type, sig = self.func_type, self.sig
        if method_type is None:
            if sig is None:
                sig = Signature(self.args, self.ret_type)
            # override 'self' type (first argument)
            self_arg = PyrexTypes.CFuncTypeArg("", self_type, None)
            self_arg.not_none = True
            method_type = sig.function_type(self_arg)
        self_type.scope.declare_builtin_cfunction(
            self.py_name, method_type, self.cname, utility_code=self.utility_code)

builtin_function_table = [
    # name,        args,   return,  C API func,           py equiv = "*"
    BuiltinFunction('abs',        "O",    "O",     "PyNumber_Absolute"),
    #('chr',       "",     "",      ""),
    #('cmp', "",   "",     "",      ""), # int PyObject_Cmp(PyObject *o1, PyObject *o2, int *result)
    #('compile',   "",     "",      ""), # PyObject* Py_CompileString( char *str, char *filename, int start)
    BuiltinFunction('delattr',    "OO",   "r",     "PyObject_DelAttr"),
    BuiltinFunction('dir',        "O",    "O",     "PyObject_Dir"),
    BuiltinFunction('divmod',     "OO",   "O",     "PyNumber_Divmod"),
    BuiltinFunction('exec',       "OOO",  "O",     "__Pyx_PyRun", utility_code = pyexec_utility_code),
    BuiltinFunction('exec',       "OO",   "O",     "__Pyx_PyRun2", utility_code = pyexec_utility_code),
    #('eval',      "",     "",      ""),
    #('execfile',  "",     "",      ""),
    #('filter',    "",     "",      ""),
    BuiltinFunction('getattr',    "OO",   "O",     "PyObject_GetAttr"),
    BuiltinFunction('getattr',    "OOO",  "O",     "__Pyx_GetAttr3", utility_code = getattr3_utility_code),
    BuiltinFunction('getattr3',   "OOO",  "O",     "__Pyx_GetAttr3", "getattr", utility_code = getattr3_utility_code), # Pyrex compatibility
    BuiltinFunction('hasattr',    "OO",   "b",     "PyObject_HasAttr"),
    BuiltinFunction('hash',       "O",    "l",     "PyObject_Hash"),
    #('hex',       "",     "",      ""),
    #('id',        "",     "",      ""),
    #('input',     "",     "",      ""),
    BuiltinFunction('intern',     "O",    "O",     "__Pyx_Intern", utility_code = intern_utility_code),
    BuiltinFunction('isinstance', "OO",   "b",     "PyObject_IsInstance"),
    BuiltinFunction('issubclass', "OO",   "b",     "PyObject_IsSubclass"),
    BuiltinFunction('iter',       "OO",   "O",     "PyCallIter_New"),
    BuiltinFunction('iter',       "O",    "O",     "PyObject_GetIter"),
    BuiltinFunction('len',        "O",    "z",     "PyObject_Length"),
    BuiltinFunction('locals',     "",     "O",     "__pyx_locals"),
    #('map',       "",     "",      ""),
    #('max',       "",     "",      ""),
    #('min',       "",     "",      ""),
    BuiltinFunction('next',       "O",    "O",     "__Pyx_PyIter_Next", utility_code = iter_next_utility_code),   # not available in Py2 => implemented here
    BuiltinFunction('next',       "OO",   "O",     "__Pyx_PyIter_Next2", utility_code = iter_next_utility_code),  # not available in Py2 => implemented here
    #('oct',       "",     "",      ""),
    #('open',      "ss",   "O",     "PyFile_FromString"),   # not in Py3
    #('ord',       "",     "",      ""),
    BuiltinFunction('pow',        "OOO",  "O",     "PyNumber_Power"),
    BuiltinFunction('pow',        "OO",   "O",     "__Pyx_PyNumber_Power2", utility_code = pow2_utility_code),
    #('range',     "",     "",      ""),
    #('raw_input', "",     "",      ""),
    #('reduce',    "",     "",      ""),
    BuiltinFunction('reload',     "O",    "O",     "PyImport_ReloadModule"),
    BuiltinFunction('repr',       "O",    "O",     "PyObject_Repr"),
    #('round',     "",     "",      ""),
    BuiltinFunction('setattr',    "OOO",  "r",     "PyObject_SetAttr"),
    #('sum',       "",     "",      ""),
    #('type',      "O",    "O",     "PyObject_Type"),
    #('unichr',    "",     "",      ""),
    #('unicode',   "",     "",      ""),
    #('vars',      "",     "",      ""),
    #('zip',       "",     "",      ""),
    # Can't do these easily until we have builtin type entries.
    #('typecheck', "OO",   "i",     "PyObject_TypeCheck", False),
    #('issubtype', "OO",   "i",     "PyType_IsSubtype",   False),
    # Put in namespace append optimization.
    BuiltinFunction('__Pyx_PyObject_Append', "OO", "O", "__Pyx_PyObject_Append"),
]
# Builtin types
# bool
# buffer
# classmethod
# dict
# enumerate
# file
# float
# int
# list
# long
# object
# property
# slice
# staticmethod
# super
# str
# tuple
# type
# xrange
builtin_types_table = [
    ("type",    "PyType_Type",     []),
    # This conflicts with the C++ bool type, and unfortunately
    # C++ is too liberal about PyObject* <-> bool conversions,
    # resulting in unintuitive runtime behavior and segfaults.
    # ("bool",  "PyBool_Type",     []),
    ("int",     "PyInt_Type",      []),
    ("long",    "PyLong_Type",     []),
    ("float",   "PyFloat_Type",    []),
    # Until we have a way to access attributes of a type,
    # we don't want to make this one builtin.
    # ("complex", "PyComplex_Type", []),
    ("bytes",   "PyBytes_Type",    []),
    ("str",     "PyString_Type",   []),
    ("unicode", "PyUnicode_Type",  [BuiltinMethod("join",  "TO", "T", "PyUnicode_Join"),
                                    ]),
    ("tuple",   "PyTuple_Type",    []),
    ("list",    "PyList_Type",     [BuiltinMethod("insert",  "TzO", "i", "PyList_Insert"),
                                    BuiltinMethod("reverse", "T",   "i", "PyList_Reverse"),
                                    BuiltinMethod("append",  "TO",  "i", "PyList_Append"),
                                    ]),
    ("dict",    "PyDict_Type",     [BuiltinMethod("items",  "T", "O", "PyDict_Items"),  # FIXME: Py3 mode?
                                    BuiltinMethod("keys",   "T", "O", "PyDict_Keys"),   # FIXME: Py3 mode?
                                    BuiltinMethod("values", "T", "O", "PyDict_Values"), # FIXME: Py3 mode?
                                    BuiltinMethod("copy",   "T", "T", "PyDict_Copy")]),
    ("slice",   "PySlice_Type",    []),
    # ("file",  "PyFile_Type",     []), # not in Py3
    ("set",       "PySet_Type",    [BuiltinMethod("clear",   "T",  "i", "PySet_Clear"),
                                    BuiltinMethod("discard", "TO", "i", "PySet_Discard"),
                                    BuiltinMethod("add",     "TO", "i", "PySet_Add"),
                                    BuiltinMethod("pop",     "T",  "O", "PySet_Pop")]),
    ("frozenset", "PyFrozenSet_Type", []),
]

types_that_construct_their_instance = (
    # some builtin types do not always return an instance of
    # themselves - these do:
    'type', 'bool', 'long', 'float', 'bytes', 'unicode', 'tuple', 'list', 'dict', 'set', 'frozenset'
    # 'str',             # only in Py3.x
    # 'file',            # only in Py2.x
)

builtin_structs_table = [
    ('Py_buffer', 'Py_buffer',
     [("buf",        PyrexTypes.c_void_ptr_type),
      ("obj",        PyrexTypes.py_object_type),
      ("len",        PyrexTypes.c_py_ssize_t_type),
      ("itemsize",   PyrexTypes.c_py_ssize_t_type),
      ("readonly",   PyrexTypes.c_bint_type),
      ("ndim",       PyrexTypes.c_int_type),
      ("format",     PyrexTypes.c_char_ptr_type),
      ("shape",      PyrexTypes.c_py_ssize_t_ptr_type),
      ("strides",    PyrexTypes.c_py_ssize_t_ptr_type),
      ("suboffsets", PyrexTypes.c_py_ssize_t_ptr_type),
      ("internal",   PyrexTypes.c_void_ptr_type),
      ])
]

# set up builtin scope
builtin_scope = BuiltinScope()

def init_builtin_funcs():
    for desc in builtin_function_table:
        declare_builtin_func(*desc)
    for bf in builtin_function_table:
        bf.declare_in_scope(builtin_scope)

builtin_types = {}

def init_builtin_types():
    global builtin_types
    for name, cname, funcs in builtin_types_table:
    for name, cname, methods in builtin_types_table:
        utility = builtin_utility_code.get(name)
        the_type = builtin_scope.declare_builtin_type(name, cname, utility)
        builtin_types[name] = the_type
        for name, args, ret, cname in funcs:
            sig = Signature(args, ret)
            the_type.scope.declare_cfunction(name, sig.function_type(), None, cname)
        for method in methods:
            method.declare_in_type(the_type)

def init_builtin_structs():
    for name, cname, attribute_types in builtin_structs_table:
...
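The table entries added above are what let calls like the ones below be translated directly to the listed C-API helpers when Cython can see the builtin at compile time. A small .pyx-style illustration of the intent (behaviour as suggested by the tables in this diff, not verified against any particular Cython release):

# example.pyx -- calls that the updated builtin tables can map to C-level helpers
def use_builtins(it, list vals):
    first = next(it, None)           # __Pyx_PyIter_Next2(it, Py_None)
    squared = pow(first, 2)          # __Pyx_PyNumber_Power2 -> PyNumber_Power(a, b, Py_None)
    vals.append(first)               # PyList_Append on the typed list argument
    return u", ".join([u"a", u"b"])  # PyUnicode_Join via the new unicode BuiltinMethod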
Cython/Compiler/Code.py

...
@@ -759,16 +759,22 @@ class GlobalState(object):
        try:
            return self.input_file_contents[source_desc]
        except KeyError:
            pass
            source_file = source_desc.get_lines(encoding='ASCII',
                                                error_handling='ignore')
            try:
                F = [u' * ' + line.rstrip().replace(
                        u'*/', u'*[inserted by cython to avoid comment closer]/'
                        ).replace(
                        u'/*', u'/[inserted by cython to avoid comment start]*'
                        )
                     for line in source_desc.get_lines(encoding='ASCII',
                                                       error_handling='ignore')]
                if len(F) == 0: F.append(u'')
                self.input_file_contents[source_desc] = F
                return F
                     for line in source_file]
            finally:
                if hasattr(source_file, 'close'):
                    source_file.close()
            if not F: F.append(u'')
            self.input_file_contents[source_desc] = F
            return F
#
# Utility code state
...
...
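The Code.py hunk applies the usual resource-cleanup pattern: read the source lines, then close the underlying file in a finally block even if the comment-escaping step fails. Schematically (an illustrative sketch only, not the compiler's actual method):

def commented_source_lines(source_desc):
    source_file = source_desc.get_lines(encoding='ASCII', error_handling='ignore')
    try:
        # format each line for embedding in a C block comment
        return [u' * ' + line.rstrip() for line in source_file]
    finally:
        # get_lines() may return a file-like object; close it if it can be closed
        if hasattr(source_file, 'close'):
            source_file.close()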
Cython/Compiler/ExprNodes.py
...
@@ -651,7 +651,11 @@ class ExprNode(Node):
        return self.result_in_temp()

    def may_be_none(self):
        return self.type.is_pyobject
        if not self.type.is_pyobject:
            return False
        if self.constant_result not in (not_a_constant, constant_value_not_set):
            return self.constant_result is not None
        return True

    def as_cython_attribute(self):
        return None
...
@@ -803,22 +807,56 @@ class IntNode(ConstNode):
    def __init__(self, pos, **kwds):
        ExprNode.__init__(self, pos, **kwds)
        if 'type' not in kwds:
            rank = max(1, len(self.longness))
            sign = not self.unsigned
            self.type = PyrexTypes.modifiers_and_name_to_type[sign, rank, "int"]
            self.type = self.find_suitable_type_for_value()

    def find_suitable_type_for_value(self):
        if self.constant_result is constant_value_not_set:
            try:
                self.calculate_constant_result()
            except ValueError:
                pass
        if self.constant_result in (constant_value_not_set, not_a_constant) or \
               self.unsigned or self.longness == 'LL':
            # clearly a C literal
            rank = (self.longness == 'LL') and 2 or 1
            suitable_type = PyrexTypes.modifiers_and_name_to_type[not self.unsigned, rank, "int"]
            if self.type:
                suitable_type = PyrexTypes.widest_numeric_type(suitable_type, self.type)
        else:
            # C literal or Python literal - split at 32bit boundary
            if self.constant_result >= -2**31 and self.constant_result < 2**31:
                if self.type and self.type.is_int:
                    suitable_type = self.type
                else:
                    suitable_type = PyrexTypes.c_long_type
            else:
                suitable_type = PyrexTypes.py_object_type
        return suitable_type

    def coerce_to(self, dst_type, env):
        if self.type is dst_type:
            return self
        elif dst_type.is_float:
            float_value = float(self.value)
            return FloatNode(self.pos, value=repr(float_value), constant_result=float_value)
            if self.constant_result is not not_a_constant:
                float_value = float(self.constant_result)
                return FloatNode(self.pos, value=repr(float_value), type=dst_type,
                                 constant_result=float_value)
            else:
                return FloatNode(self.pos, value=self.value, type=dst_type,
                                 constant_result=not_a_constant)
        node = IntNode(self.pos, value=self.value, constant_result=self.constant_result,
                       unsigned=self.unsigned, longness=self.longness)
                       type=dst_type, unsigned=self.unsigned, longness=self.longness)
        if dst_type.is_numeric and not dst_type.is_complex:
            node = IntNode(self.pos, value=self.value, constant_result=self.constant_result,
                           type=dst_type, unsigned=self.unsigned, longness=self.longness)
            return node
        if dst_type.is_pyobject:
            node.type = PyrexTypes.py_object_type
        elif dst_type.is_pyobject:
            node = IntNode(self.pos, value=self.value, constant_result=self.constant_result,
                           type=PyrexTypes.py_object_type, unsigned=self.unsigned, longness=self.longness)
        else:
            # not setting the type here!
            node = IntNode(self.pos, value=self.value, constant_result=self.constant_result,
                           unsigned=self.unsigned, longness=self.longness)
        # We still need to perform normal coerce_to processing on the
        # result, because we might be coercing to an extension type,
        # in which case a type test node will be needed.
...
@@ -1450,7 +1488,7 @@ class NameNode(AtomicExprNode):
            return # There was an error earlier
        if entry.is_builtin and Options.cache_builtins:
            return # Lookup already cached
        elif entry.is_real_dict:
        elif entry.is_pyclass_attr:
            assert entry.type.is_pyobject, "Python global or builtin not a Python object"
            interned_cname = code.intern_identifier(self.entry.name)
            if entry.is_builtin:
...
@@ -1459,7 +1497,7 @@ class NameNode(AtomicExprNode):
            namespace = entry.scope.namespace_cname
            code.globalstate.use_utility_code(getitem_dict_utility_code)
            code.putln(
                '%s = __Pyx_PyDict_GetItem(%s, %s); %s' % (
                '%s = PyObject_GetItem(%s, %s); %s' % (
                self.result(),
                namespace,
                interned_cname,
...
@@ -1521,9 +1559,9 @@ class NameNode(AtomicExprNode):
                # in Py2.6+, we need to invalidate the method cache
                code.putln("PyType_Modified(%s);" % entry.scope.parent_type.typeptr_cname)
        elif entry.is_real_dict:
        elif entry.is_pyclass_attr:
            code.put_error_if_neg(self.pos,
                'PyDict_SetItem(%s, %s, %s)' % (
                'PyObject_SetItem(%s, %s, %s)' % (
                namespace,
                interned_cname,
                rhs.py_result()))
...
@@ -1608,10 +1646,10 @@ class NameNode(AtomicExprNode):
        if not self.entry.is_pyglobal:
            error(self.pos, "Deletion of local or C global name not supported")
            return
        if self.entry.is_real_dict:
        if self.entry.is_pyclass_attr:
            namespace = self.entry.scope.namespace_cname
            code.put_error_if_neg(self.pos,
                'PyDict_DelItemString(%s, "%s")' % (
                'PyMapping_DelItemString(%s, "%s")' % (
                namespace, self.entry.name))
        else:
...
@@ -2066,7 +2104,6 @@ class IndexNode(ExprNode):
        skip_child_analysis = False
        buffer_access = False
        if self.base.type.is_buffer:
            assert hasattr(self.base, "entry") # Must be a NameNode-like node
            if self.indices:
                indices = self.indices
            else:
...
@@ -2081,6 +2118,8 @@ class IndexNode(ExprNode):
                    x.analyse_types(env)
                    if not x.type.is_int:
                        buffer_access = False
            if buffer_access:
                assert hasattr(self.base, "entry") # Must be a NameNode-like node

        # On cloning, indices is cloned. Otherwise, unpack index into indices
        assert not (buffer_access and isinstance(self.index, CloneNode))
...
@@ -2742,6 +2781,7 @@ class SimpleCallNode(CallNode):
    wrapper_call = False
    has_optional_args = False
    nogil = False
    analysed = False

    def compile_time_value(self, denv):
        function = self.function.compile_time_value(denv)
...
@@ -2795,6 +2835,9 @@ class SimpleCallNode(CallNode):
    def analyse_types(self, env):
        if self.analyse_as_type_constructor(env):
            return
        if self.analysed:
            return
        self.analysed = True
        function = self.function
        function.is_called = 1
        self.function.analyse_types(env)
...
@@ -2836,7 +2879,12 @@ class SimpleCallNode(CallNode):
                arg.analyse_types(env)
        if self.self and func_type.args:
            # Coerce 'self' to the type expected by the method.
            expected_type = func_type.args[0].type
            self_arg = func_type.args[0]
            if self_arg.not_none: # C methods must do the None test for self at *call* time
                self.self = self.self.as_none_safe_node(
                    "'NoneType' object has no attribute '%s'" % self.function.entry.name,
                    'PyExc_AttributeError')
            expected_type = self_arg.type
            self.coerced_self = CloneNode(self.self).coerce_to(expected_type, env)
            # Insert coerced 'self' argument into argument list.
...
@@ -3674,8 +3722,8 @@ class SequenceNode(ExprNode):
            arg = self.args[i]
            if not skip_children: arg.analyse_types(env)
            self.args[i] = arg.coerce_to_pyobject(env)
        self.type = py_object_type
        self.is_temp = 1
        # not setting self.type here, subtypes do this

    def may_be_none(self):
        return False
...
@@ -3879,7 +3927,7 @@ class TupleNode(SequenceNode):
            self.is_literal = 1
        else:
            SequenceNode.analyse_types(self, env, skip_children)

    def calculate_result_code(self):
        if len(self.args) > 0:
            error(self.pos, "Positive length tuples must be constructed.")
...
@@ -3935,6 +3983,7 @@ class ListNode(SequenceNode):
    #  orignial_args    [ExprNode]      used internally

    obj_conversion_errors = []
    type = list_type

    gil_message = "Constructing Python list"
...
@@ -3953,7 +4002,6 @@ class ListNode(SequenceNode):
        hold_errors()
        self.original_args = list(self.args)
        SequenceNode.analyse_types(self, env)
        self.type = list_type
        self.obj_conversion_errors = held_errors()
        release_errors(ignore=True)
...
@@ -4475,14 +4523,23 @@ class ClassNode(ExprNode, ModuleNameMixin):
    #  dict         ExprNode           Class dict (not owned by this node)
    #  doc          ExprNode or None   Doc string
    #  module_name  EncodedString      Name of defining module
    #  keyword_args ExprNode or None   Py3 Dict of keyword arguments, passed to __new__
    #  starstar_arg ExprNode or None   Py3 Dict of extra keyword args, same here

    subexprs = ['bases', 'doc']
    subexprs = ['bases', 'keyword_args', 'starstar_arg', 'doc']

    def analyse_types(self, env):
        self.bases.analyse_types(env)
        if self.doc:
            self.doc.analyse_types(env)
            self.doc = self.doc.coerce_to_pyobject(env)
        if self.keyword_args:
            self.keyword_args.analyse_types(env)
        if self.starstar_arg:
            self.starstar_arg.analyse_types(env)
            # make sure we have a Python object as **kwargs mapping
            self.starstar_arg = \
                self.starstar_arg.coerce_to_pyobject(env)
        self.type = py_object_type
        self.is_temp = 1
        env.use_utility_code(create_class_utility_code);
...
@@ -4490,12 +4547,25 @@ class ClassNode(ExprNode, ModuleNameMixin):
        self.set_mod_name(env)

    def may_be_none(self):
        return False
        return True

    gil_message = "Constructing Python class"

    def generate_result_code(self, code):
        cname = code.intern_identifier(self.name)

        if self.keyword_args and self.starstar_arg:
            code.put_error_if_neg(self.pos,
                "PyDict_Update(%s, %s)" % (
                    self.keyword_args.py_result(), self.starstar_arg.py_result()))
            keyword_code = self.keyword_args.py_result()
        elif self.keyword_args:
            keyword_code = self.keyword_args.py_result()
        elif self.starstar_arg:
            keyword_code = self.starstar_arg.py_result()
        else:
            keyword_code = 'NULL'

        if self.doc:
            code.put_error_if_neg(self.pos,
                'PyDict_SetItemString(%s, "__doc__", %s)' % (
...
@@ -4503,15 +4573,17 @@ class ClassNode(ExprNode, ModuleNameMixin):
                self.doc.py_result()))
        py_mod_name = self.get_py_mod_name(code)
        code.putln(
            '%s = __Pyx_CreateClass(%s, %s, %s, %s); %s' % (
            '%s = __Pyx_CreateClass(%s, %s, %s, %s, %s); %s' % (
                self.result(),
                self.bases.py_result(),
                self.dict.py_result(),
                cname,
                py_mod_name,
                keyword_code,
                code.error_goto_if_null(self.result(), self.pos)))
        code.put_gotref(self.py_result())


class BoundMethodNode(ExprNode):
    #  Helper class used in the implementation of Python
    #  class definitions. Constructs an bound method
...
@@ -4972,7 +5044,7 @@ def unop_node(pos, operator, operand):
    # Construct unnop node of appropriate class for
    # given operator.
    if isinstance(operand, IntNode) and operator == '-':
        return IntNode(pos = operand.pos, value = str(-int(operand.value, 0)))
        return IntNode(pos = operand.pos, value = str(-Utils.str_to_number(operand.value)))
    elif isinstance(operand, UnopNode) and operand.operator == operator:
        warning(pos, "Python has no increment/decrement operator: %s%sx = %s(%sx) = x" % ((operator,)*4), 5)
    return unop_node_classes[operator](pos,
...
@@ -5263,6 +5335,7 @@ class BinopNode(ExprNode):
    #  - Allocate temporary for result if needed.

    subexprs = ['operand1', 'operand2']
    inplace = False

    def calculate_constant_result(self):
        func = compile_time_binary_operators[self.operator]
...
@@ -5369,7 +5442,7 @@ class BinopNode(ExprNode):
        #print "BinopNode.generate_result_code:", self.operand1, self.operand2 ###
        if self.operand1.type.is_pyobject:
            function = self.py_operation_function()
            if function == "PyNumber_Power":
            if self.operator == '**':
                extra_args = ", Py_None"
            else:
                extra_args = ""
...
@@ -5472,7 +5545,10 @@ class NumBinopNode(BinopNode):
                BinopNode.is_py_operation_types(self, type1, type2))

    def py_operation_function(self):
        return self.py_functions[self.operator]
        fuction = self.py_functions[self.operator]
        if self.inplace:
            fuction = fuction.replace('PyNumber_', 'PyNumber_InPlace')
        return fuction

    py_functions = {
        "|":        "PyNumber_Or",
...
@@ -5489,7 +5565,6 @@ class NumBinopNode(BinopNode):
        "**":       "PyNumber_Power"
    }


class IntBinopNode(NumBinopNode):
    #  Binary operation taking integer arguments.
...
@@ -5952,7 +6027,9 @@ richcmp_constants = {
class CmpNode(object):
    #  Mixin class containing code common to PrimaryCmpNodes
    #  and CascadedCmpNodes.

    special_bool_cmp_function = None

    def infer_type(self, env):
        # TODO: Actually implement this (after merging with -unstable).
        return py_object_type
...
@@ -6111,6 +6188,7 @@ class CmpNode(object):
    def is_python_result(self):
        return ((self.has_python_operands()
                 and self.special_bool_cmp_function is None
                 and self.operator not in ('is', 'is_not', 'in', 'not_in')
                 and not self.is_c_string_contains()
                 and not self.is_ptr_contains())
...
@@ -6129,6 +6207,16 @@ class CmpNode(object):
        return (container_type.is_ptr or container_type.is_array) \
            and not container_type.is_string

    def find_special_bool_compare_function(self, env):
        if self.operator in ('==', '!='):
            type1, type2 = self.operand1.type, self.operand2.type
            if type1.is_pyobject and type2.is_pyobject:
                if type1 is Builtin.unicode_type or type2 is Builtin.unicode_type:
                    env.use_utility_code(pyunicode_equals_utility_code)
                    self.special_bool_cmp_function = "__Pyx_PyUnicode_Equals"
                    return True
        return False

    def generate_operation_code(self, code, result_code,
            operand1, op, operand2):
        if self.type.is_pyobject:
...
@@ -6139,19 +6227,29 @@ class CmpNode(object):
            negation = "!"
        else:
            negation = ""
        if op == 'in' or op == 'not_in':
        if self.special_bool_cmp_function:
            if operand1.type.is_pyobject:
                result1 = operand1.py_result()
            else:
                result1 = operand1.result()
            if operand2.type.is_pyobject:
                result2 = operand2.py_result()
            else:
                result2 = operand2.result()
            code.putln("%s = %s(%s, %s, %s); %s" % (
                result_code,
                self.special_bool_cmp_function,
                result1,
                result2,
                richcmp_constants[op],
                code.error_goto_if_neg(result_code, self.pos)))
        elif op == 'in' or op == 'not_in':
            code.globalstate.use_utility_code(contains_utility_code)
            if self.type.is_pyobject:
                coerce_result = "__Pyx_PyBoolOrNull_FromLong"
            if op == 'not_in':
                negation = "__Pyx_NegateNonNeg"
            if operand2.type is dict_type:
                code.globalstate.use_utility_code(raise_none_iter_error_utility_code)
                code.putln("if (unlikely(%s == Py_None)) {" % operand2.py_result())
                code.putln("__Pyx_RaiseNoneNotIterableError(); %s" % code.error_goto(self.pos))
                code.putln("} else {")
                method = "PyDict_Contains"
            else:
                method = "PySequence_Contains"
...
@@ -6171,9 +6269,6 @@ class CmpNode(object):
                operand1.py_result(),
                got_ref,
                error_clause(result_code, self.pos)))
            if operand2.type is dict_type:
                code.putln("}")
        elif (operand1.type.is_pyobject and op not in ('is', 'is_not')):
            code.putln("%s = PyObject_RichCompare(%s, %s, %s); %s" % (
...
@@ -6262,6 +6357,46 @@ static CYTHON_INLINE int __Pyx_UnicodeContains(PyObject* unicode, Py_UNICODE cha
}
"""
)
pyunicode_equals_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/
"""
,
impl
=
"""
static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) {
if (s1 == s2) { /* as done by PyObject_RichCompareBool(); also catches the (interned) empty string */
return (equals == Py_EQ);
} else if (PyUnicode_CheckExact(s1) & PyUnicode_CheckExact(s2)) {
if (PyUnicode_GET_SIZE(s1) != PyUnicode_GET_SIZE(s2)) {
return (equals == Py_NE);
} else if (PyUnicode_GET_SIZE(s1) == 1) {
if (equals == Py_EQ)
return (PyUnicode_AS_UNICODE(s1)[0] == PyUnicode_AS_UNICODE(s2)[0]);
else
return (PyUnicode_AS_UNICODE(s1)[0] != PyUnicode_AS_UNICODE(s2)[0]);
} else {
int result = PyUnicode_Compare(s1, s2);
if ((result == -1) && unlikely(PyErr_Occurred()))
return -1;
return (equals == Py_EQ) ? (result == 0) : (result != 0);
}
} else if ((s1 == Py_None) & (s2 == Py_None)) {
return (equals == Py_EQ);
} else if ((s1 == Py_None) & PyUnicode_CheckExact(s2)) {
return (equals == Py_NE);
} else if ((s2 == Py_None) & PyUnicode_CheckExact(s1)) {
return (equals == Py_NE);
} else {
int result;
PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
if (!py_result)
return -1;
result = __Pyx_PyObject_IsTrue(py_result);
Py_DECREF(py_result);
return result;
}
}
"""
)
class PrimaryCmpNode(ExprNode, CmpNode):
    #  Non-cascaded comparison or first comparison of
...
...
@@ -6330,8 +6465,14 @@ class PrimaryCmpNode(ExprNode, CmpNode):
            # Will be transformed by IterationTransform
            return
        else:
            if self.operand2.type is dict_type:
                self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable")
            common_type = py_object_type
            self.is_pycmp = True
        elif self.find_special_bool_compare_function(env):
            common_type = None # if coercion needed, the method call above has already done it
            self.is_pycmp = False # result is bint
            self.is_temp = True # must check for error return
        else:
            common_type = self.find_common_type(env, self.operator, self.operand1)
            self.is_pycmp = common_type.is_pyobject
...
@@ -6487,6 +6628,8 @@ class CascadedCmpNode(Node, CmpNode):
    def coerce_operands_to_pyobjects(self, env):
        self.operand2 = self.operand2.coerce_to_pyobject(env)
        if self.operand2.type is dict_type and self.operator in ('in', 'not_in'):
            self.operand2 = self.operand2.as_none_safe_node("'NoneType' object is not iterable")
        if self.cascade:
            self.cascade.coerce_operands_to_pyobjects(env)
...
@@ -6536,13 +6679,14 @@ binop_node_classes = {
    "**":       PowNode
}

def binop_node(pos, operator, operand1, operand2):
def binop_node(pos, operator, operand1, operand2, inplace=False):
    # Construct binop node of appropriate class for
    # given operator.
    return binop_node_classes[operator](pos,
        operator = operator,
        operand1 = operand1,
        operand2 = operand2)
        operand2 = operand2,
        inplace = inplace)

#-------------------------------------------------------------------
#
...
...
@@ -7154,44 +7298,115 @@ static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) {
create_class_utility_code = UtilityCode(
proto = """
static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, PyObject *modname); /*proto*/
static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name,
PyObject *modname, PyObject *kwargs); /*proto*/
static int __Pyx_PrepareClass(PyObject *metaclass, PyObject *bases, PyObject *name,
PyObject *mkw, PyObject *dict); /*proto*/
"""
,
impl
=
"""
static PyObject *__Pyx_CreateClass(
PyObject *bases, PyObject *methods, PyObject *name, PyObject *modname)
{
PyObject *result = 0;
#if PY_MAJOR_VERSION < 3
PyObject *metaclass = 0, *base;
#endif
static int __Pyx_PrepareClass(PyObject *metaclass, PyObject *bases, PyObject *name,
PyObject *mkw, PyObject *dict) {
PyObject *prep;
PyObject *pargs;
PyObject *ns;
prep = PyObject_GetAttrString(metaclass, "__prepare__");
if (prep == NULL) {
if (!PyErr_ExceptionMatches(PyExc_AttributeError))
return -1;
PyErr_Clear();
return 0;
}
pargs = PyTuple_New(2);
if (!pargs) {
Py_DECREF(prep);
return -1;
}
Py_INCREF(name);
Py_INCREF(bases);
PyTuple_SET_ITEM(pargs, 0, name);
PyTuple_SET_ITEM(pargs, 1, bases);
ns = PyEval_CallObjectWithKeywords(prep, pargs, mkw);
Py_DECREF(pargs);
Py_DECREF(prep);
if (ns == NULL)
return -1;
/* XXX: This is hack, merge namespace back to dict,
__prepare__ should be ran before dict initialization */
if (PyDict_Merge(dict, ns, 0)) {
Py_DECREF(ns);
return -1;
}
Py_DECREF(ns);
return 0;
}
if (PyDict_SetItemString(methods, "__module__", modname) < 0)
goto bad;
#if PY_MAJOR_VERSION < 3
metaclass = PyDict_GetItemString(methods, "__metaclass__");
static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name,
PyObject *modname, PyObject *kwargs) {
PyObject *result = NULL;
PyObject *metaclass = NULL;
PyObject *mkw = NULL;
if (metaclass != NULL)
Py_INCREF(metaclass);
else if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
base = PyTuple_GET_ITEM(bases, 0);
metaclass = PyObject_GetAttrString(base, "__class__");
if (metaclass == NULL) {
PyErr_Clear();
metaclass = (PyObject *)base->ob_type;
if (PyDict_SetItemString(dict, "__module__", modname) < 0)
return NULL;
/* Python3 metaclasses */
if (kwargs) {
mkw = PyDict_Copy(kwargs); /* Don't modify kwargs passed in! */
if (!mkw)
return NULL;
metaclass = PyDict_GetItemString(mkw, "metaclass");
if (metaclass) {
Py_INCREF(metaclass);
if (PyDict_DelItemString(mkw, "metaclass") < 0)
goto bad;
if (__Pyx_PrepareClass(metaclass, bases, name, mkw, dict))
goto bad;
}
}
else {
metaclass = (PyObject *) &PyClass_Type;
if (!metaclass) {
/* Python2 __metaclass__ */
metaclass = PyDict_GetItemString(dict, "__metaclass__");
if (!metaclass) {
/* Default metaclass */
#if PY_MAJOR_VERSION < 3
if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
PyObject *base = PyTuple_GET_ITEM(bases, 0);
metaclass = PyObject_GetAttrString(base, "__class__");
if (!metaclass) {
PyErr_Clear();
metaclass = (PyObject *)base->ob_type;
}
} else
metaclass = (PyObject *) &PyClass_Type;
#else
if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
PyObject *base = PyTuple_GET_ITEM(bases, 0);
metaclass = (PyObject *)base->ob_type;
} else
metaclass = (PyObject *) &PyType_Type;
#endif
}
Py_INCREF(metaclass);
}
result = PyObject_CallFunctionObjArgs(metaclass, name, bases, methods, NULL);
#else
/* it seems that python3+ handle __metaclass__ itself */
result = PyObject_CallFunctionObjArgs((PyObject *)&PyType_Type, name, bases, methods, NULL);
#endif
if (mkw && PyDict_Size(mkw) > 0) {
PyObject *margs = PyTuple_New(3);
if (!margs)
goto bad;
Py_INCREF(name);
Py_INCREF(bases);
Py_INCREF(dict);
PyTuple_SET_ITEM(margs, 0, name);
PyTuple_SET_ITEM(margs, 1, bases);
PyTuple_SET_ITEM(margs, 2, dict);
result = PyEval_CallObjectWithKeywords(metaclass, margs, mkw);
Py_DECREF(margs);
} else {
result = PyObject_CallFunctionObjArgs(metaclass, name, bases, dict, NULL);
}
bad:
Py_DECREF(metaclass);
Py_XDECREF(mkw);
return result;
}
"""
)
...
...
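The ClassNode and __Pyx_CreateClass/__Pyx_PrepareClass changes above are what allow Python-3-style metaclass keyword arguments on class statements compiled by Cython (matching the new tests/run/metaclass.pyx entries in the file list). In plain Python 3 terms, the construct being supported is the following; the metaclass name and the 'extra' keyword are illustrative only.

class Meta(type):
    def __new__(mcs, name, bases, namespace, **kwargs):
        cls = super(Meta, mcs).__new__(mcs, name, bases, namespace)
        cls.extra = kwargs.get('extra')   # keyword passed through the class statement
        return cls
    def __init__(cls, name, bases, namespace, **kwargs):
        # swallow the extra keywords before delegating to type.__init__
        super(Meta, cls).__init__(name, bases, namespace)

# Py3 syntax: keyword arguments after the bases are forwarded to the metaclass;
# at the C level __Pyx_CreateClass now receives them as the new 'kwargs' parameter.
class Configured(object, metaclass=Meta, extra=42):
    pass

assert Configured.extra == 42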
Cython/Compiler/Main.py
...
@@ -102,6 +102,7 @@ class Context(object):
        from ParseTreeTransforms import AnalyseDeclarationsTransform, AnalyseExpressionsTransform
        from ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
        from ParseTreeTransforms import InterpretCompilerDirectives, TransformBuiltinMethods
        from ParseTreeTransforms import ExpandInplaceOperators
        from TypeInference import MarkAssignments, MarkOverflowingArithmetic
        from ParseTreeTransforms import AlignFunctionDefinitions, GilCheck
        from AnalysedTreeTransforms import AutoTestDictTransform
...
@@ -147,6 +148,7 @@ class Context(object):
            IntroduceBufferAuxiliaryVars(self),
            _check_c_declarations,
            AnalyseExpressionsTransform(self),
            ExpandInplaceOperators(self),
            OptimizeBuiltinCalls(self),  ## Necessary?
            IterationTransform(),
            SwitchTransform(),
...
Cython/Compiler/Nodes.py
...
@@ -534,8 +534,9 @@ class CFuncDeclaratorNode(CDeclaratorNode):
        if nonempty:
            nonempty -= 1
        func_type_args = []
        for arg_node in self.args:
            name_declarator, type = arg_node.analyse(env, nonempty = nonempty)
        for i, arg_node in enumerate(self.args):
            name_declarator, type = arg_node.analyse(env, nonempty = nonempty,
                                                     is_self_arg = (i == 0 and env.is_c_class_scope))
            name = name_declarator.name
            if name_declarator.cname:
                error(self.pos,
...
@@ -649,8 +650,9 @@ class CArgDeclNode(Node):
    default_value = None
    annotation = None

    def analyse(self, env, nonempty = 0):
        #print "CArgDeclNode.analyse: is_self_arg =", self.is_self_arg ###
    def analyse(self, env, nonempty = 0, is_self_arg = False):
        if is_self_arg:
            self.base_type.is_self_arg = self.is_self_arg = True
        if self.type is None:
            # The parser may missinterpret names as types...
            # We fix that here.
...
@@ -1600,7 +1602,7 @@ class CFuncDefNode(FuncDefNode):
    def analyse_declarations(self, env):
        self.directive_locals.update(env.directives['locals'])
        base_type = self.base_type.analyse(env)
        # The 2 here is because we need both function and argument names.
        # The 2 here is because we need both function and argument names.
        name_declarator, type = self.declarator.analyse(base_type, env, nonempty = 2 * (self.body is not None))
        if not type.is_cfunction:
            error(self.pos,
...
@@ -1907,13 +1909,16 @@ class DefNode(FuncDefNode):
                                          is_overridable = True)
            cfunc = CVarDefNode(self.pos, type=cfunc_type)
        else:
            if scope is None:
                scope = cfunc.scope
            cfunc_type = cfunc.type
            if len(self.args) != len(cfunc_type.args) or cfunc_type.has_varargs:
                error(self.pos, "wrong number of arguments")
                error(cfunc.pos, "previous declaration here")
        for formal_arg, type_arg in zip(self.args, cfunc_type.args):
            name_declarator, type = formal_arg.analyse(cfunc.scope, nonempty=1)
            if type is None or type is PyrexTypes.py_object_type or formal_arg.is_self:
        for i, (formal_arg, type_arg) in enumerate(zip(self.args, cfunc_type.args)):
            name_declarator, type = formal_arg.analyse(scope, nonempty=1,
                                                       is_self_arg = (i == 0 and scope.is_c_class_scope))
            if type is None or type is PyrexTypes.py_object_type:
                formal_arg.type = type_arg.type
                formal_arg.name_declarator = name_declarator
        import ExprNodes
...
@@ -2935,7 +2940,8 @@ class PyClassDefNode(ClassDefNode):
    child_attrs = ["body", "dict", "classobj", "target"]
    decorators = None

    def __init__(self, pos, name, bases, doc, body, decorators=None):
    def __init__(self, pos, name, bases, doc, body, decorators=None,
                 keyword_args=None, starstar_arg=None):
        StatNode.__init__(self, pos)
        self.name = name
        self.doc = doc
...
@@ -2950,7 +2956,8 @@ class PyClassDefNode(ClassDefNode):
        else:
            doc_node = None
        self.classobj = ExprNodes.ClassNode(pos, name=name,
            bases=bases, dict=self.dict, doc=doc_node)
            bases=bases, dict=self.dict, doc=doc_node,
            keyword_args=keyword_args, starstar_arg=starstar_arg)
        self.target = ExprNodes.NameNode(pos, name=name)

    def as_cclass(self):
...
@@ -3513,132 +3520,41 @@ class InPlaceAssignmentNode(AssignmentNode):
    #  (it must be a NameNode, AttributeNode, or IndexNode).

    child_attrs = ["lhs", "rhs"]
    dup = None

    def analyse_declarations(self, env):
        self.lhs.analyse_target_declaration(env)

    def analyse_types(self, env):
        self.dup = self.create_dup_node(env) # re-assigns lhs to a shallow copy
        self.rhs.analyse_types(env)
        self.lhs.analyse_target_types(env)
        import ExprNodes
        if self.lhs.type.is_pyobject:
            self.rhs = self.rhs.coerce_to_pyobject(env)
        elif self.rhs.type.is_pyobject or (self.lhs.type.is_numeric and self.rhs.type.is_numeric):
            self.rhs = self.rhs.coerce_to(self.lhs.type, env)
        if self.lhs.type.is_pyobject:
            self.result_value_temp = ExprNodes.PyTempNode(self.pos, env)
            self.result_value = self.result_value_temp.coerce_to(self.lhs.type, env)

    def generate_execution_code(self, code):
        import ExprNodes
        self.rhs.generate_evaluation_code(code)
        self.dup.generate_subexpr_evaluation_code(code)
        if self.dup.is_temp:
            self.dup.allocate_temp_result(code)
        # self.dup.generate_result_code is run only if it is not buffer access
        if self.operator == "**":
            extra = ", Py_None"
        else:
            extra = ""
        if self.lhs.type.is_pyobject:
            if isinstance(self.lhs, ExprNodes.IndexNode) and self.lhs.is_buffer_access:
        self.lhs.generate_subexpr_evaluation_code(code)
        c_op = self.operator
        if c_op == "//":
            c_op = "/"
        elif c_op == "**":
            error(self.pos, "No C inplace power operator")
        if isinstance(self.lhs, ExprNodes.IndexNode) and self.lhs.is_buffer_access:
            if self.lhs.type.is_pyobject:
                error(self.pos, "In-place operators not allowed on object buffers in this release.")
                self.dup.generate_result_code(code)
                self.result_value_temp.allocate(code)
                code.putln(
                    "%s = %s(%s, %s%s); %s" % (
                        self.result_value.result(),
                        self.py_operation_function(),
                        self.dup.py_result(),
                        self.rhs.py_result(),
                        extra,
                        code.error_goto_if_null(self.result_value.py_result(), self.pos)))
                code.put_gotref(self.result_value.py_result())
                self.result_value.generate_evaluation_code(code) # May be a type check...
                self.rhs.generate_disposal_code(code)
                self.rhs.free_temps(code)
                self.dup.generate_disposal_code(code)
                self.dup.free_temps(code)
                self.lhs.generate_assignment_code(self.result_value, code)
                self.result_value_temp.release(code)
        else:
            c_op = self.operator
            if c_op == "//":
                c_op = "/"
            elif c_op == "**":
                error(self.pos, "No C inplace power operator")
            elif self.lhs.type.is_complex:
                error(self.pos, "Inplace operators not implemented for complex types.")
            # have to do assignment directly to avoid side-effects
            if isinstance(self.lhs, ExprNodes.IndexNode) and self.lhs.is_buffer_access:
                self.lhs.generate_buffer_setitem_code(self.rhs, code, c_op)
            else:
                self.dup.generate_result_code(code)
                code.putln("%s %s= %s;" % (self.lhs.result(), c_op, self.rhs.result()) )
            self.rhs.generate_disposal_code(code)
            self.rhs.free_temps(code)
        if self.dup.is_temp:
            self.dup.generate_subexpr_disposal_code(code)
            self.dup.free_subexpr_temps(code)

    def create_dup_node(self, env):
        import ExprNodes
        self.dup = self.lhs
        self.dup.analyse_types(env)
        if isinstance(self.lhs, ExprNodes.NameNode):
            target_lhs = ExprNodes.NameNode(self.dup.pos,
                                            name = self.dup.name,
                                            is_temp = self.dup.is_temp,
                                            entry = self.dup.entry)
        elif isinstance(self.lhs, ExprNodes.AttributeNode):
            target_lhs = ExprNodes.AttributeNode(self.dup.pos,
                                                 obj = ExprNodes.CloneNode(self.lhs.obj),
                                                 attribute = self.dup.attribute,
                                                 is_temp = self.dup.is_temp)
        elif isinstance(self.lhs, ExprNodes.IndexNode):
            if self.lhs.index:
                index = ExprNodes.CloneNode(self.lhs.index)
            else:
                index = None
            if self.lhs.indices:
                indices = [ExprNodes.CloneNode(x) for x in self.lhs.indices]
            else:
                indices = []
            target_lhs = ExprNodes.IndexNode(self.dup.pos,
                                             base = ExprNodes.CloneNode(self.dup.base),
                                             index = index,
                                             indices = indices,
                                             is_temp = self.dup.is_temp)
            if c_op in ('/', '%') and self.lhs.type.is_int and not code.directives['cdivision']:
                error(self.pos, "In-place non-c divide operators not allowed on int buffers.")
            self.lhs.generate_buffer_setitem_code(self.rhs, code, c_op)
        else:
            assert False, "Unsupported node: %s" % type(self.lhs)
        self.lhs = target_lhs
        return self.dup

    def py_operation_function(self):
        return self.py_functions[self.operator]

    py_functions = {
        "|":        "PyNumber_InPlaceOr",
        "^":        "PyNumber_InPlaceXor",
        "&":        "PyNumber_InPlaceAnd",
        "+":        "PyNumber_InPlaceAdd",
        "-":        "PyNumber_InPlaceSubtract",
        "*":        "PyNumber_InPlaceMultiply",
        "/":        "__Pyx_PyNumber_InPlaceDivide",
        "%":        "PyNumber_InPlaceRemainder",
        "<<":       "PyNumber_InPlaceLshift",
        ">>":       "PyNumber_InPlaceRshift",
        "**":       "PyNumber_InPlacePower",
        "//":       "PyNumber_InPlaceFloorDivide",
    }
            # C++
            # TODO: make sure overload is declared
            code.putln("%s %s= %s;" % (self.lhs.result(), c_op, self.rhs.result()))
        self.lhs.generate_subexpr_disposal_code(code)
        self.lhs.free_subexpr_temps(code)
        self.rhs.generate_disposal_code(code)
        self.rhs.free_temps(code)

    def annotate(self, code):
        self.lhs.annotate(code)
        self.rhs.annotate(code)
        self.dup.annotate(code)

    def create_binop_node(self):
        import ExprNodes
...
Cython/Compiler/Optimize.py
View file @
4f208e13
...
...
@@ -73,6 +73,7 @@ class IterationTransform(Visitor.VisitorTransform):
    def visit_ModuleNode(self, node):
        self.current_scope = node.scope
        self.module_scope = node.scope
        self.visitchildren(node)
        return node
...
...
@@ -168,12 +169,13 @@ class IterationTransform(Visitor.VisitorTransform):
        dict_obj = function.obj
        method = function.attribute

        is_py3 = self.module_scope.context.language_level >= 3
        keys = values = False
        if method == 'iterkeys':
        if method == 'iterkeys' or (is_py3 and method == 'keys'):
            keys = True
        elif method == 'itervalues':
        elif method == 'itervalues' or (is_py3 and method == 'values'):
            values = True
        elif method == 'iteritems':
        elif method == 'iteritems' or (is_py3 and method == 'items'):
            keys = values = True
        else:
            return node
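For readers of this hunk, the effect can be pictured in plain Python; the helper name below is illustrative and not part of the compiler, but it mirrors the classification added above: in language_level=3 code, loops over d.keys()/values()/items() are treated exactly like the old iterkeys()/itervalues()/iteritems() and compiled into direct dict iteration.

def classify_dict_method(method, is_py3):
    # Returns which parts of the dict items the loop needs, or None if the
    # call is not a recognised dict iteration method.
    keys = values = False
    if method == 'iterkeys' or (is_py3 and method == 'keys'):
        keys = True
    elif method == 'itervalues' or (is_py3 and method == 'values'):
        values = True
    elif method == 'iteritems' or (is_py3 and method == 'items'):
        keys = values = True
    else:
        return None          # leave the node untouched
    return keys, values

assert classify_dict_method('keys', is_py3=True) == (True, False)
assert classify_dict_method('keys', is_py3=False) is None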
...
...
@@ -1819,68 +1821,6 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
    ### builtin functions

    PyObject_GetAttr2_func_type = PyrexTypes.CFuncType(
        PyrexTypes.py_object_type, [
            PyrexTypes.CFuncTypeArg("object", PyrexTypes.py_object_type, None),
            PyrexTypes.CFuncTypeArg("attr_name", PyrexTypes.py_object_type, None),
            ])

    PyObject_GetAttr3_func_type = PyrexTypes.CFuncType(
        PyrexTypes.py_object_type, [
            PyrexTypes.CFuncTypeArg("object", PyrexTypes.py_object_type, None),
            PyrexTypes.CFuncTypeArg("attr_name", PyrexTypes.py_object_type, None),
            PyrexTypes.CFuncTypeArg("default", PyrexTypes.py_object_type, None),
            ])

    def _handle_simple_function_getattr(self, node, pos_args):
        """Replace 2/3 argument forms of getattr() by C-API calls.
        """
        if len(pos_args) == 2:
            return ExprNodes.PythonCapiCallNode(
                node.pos, "PyObject_GetAttr", self.PyObject_GetAttr2_func_type,
                args = pos_args,
                may_return_none = True,
                is_temp = node.is_temp)
        elif len(pos_args) == 3:
            return ExprNodes.PythonCapiCallNode(
                node.pos, "__Pyx_GetAttr3", self.PyObject_GetAttr3_func_type,
                args = pos_args,
                may_return_none = True,
                is_temp = node.is_temp,
                utility_code = Builtin.getattr3_utility_code)
        else:
            self._error_wrong_arg_count('getattr', node, pos_args, '2 or 3')
        return node

    PyObject_GetIter_func_type = PyrexTypes.CFuncType(
        PyrexTypes.py_object_type, [
            PyrexTypes.CFuncTypeArg("object", PyrexTypes.py_object_type, None),
            ])

    PyCallIter_New_func_type = PyrexTypes.CFuncType(
        PyrexTypes.py_object_type, [
            PyrexTypes.CFuncTypeArg("object", PyrexTypes.py_object_type, None),
            PyrexTypes.CFuncTypeArg("sentinel", PyrexTypes.py_object_type, None),
            ])

    def _handle_simple_function_iter(self, node, pos_args):
        """Replace 1/2 argument forms of iter() by C-API calls.
        """
        if len(pos_args) == 1:
            return ExprNodes.PythonCapiCallNode(
                node.pos, "PyObject_GetIter", self.PyObject_GetIter_func_type,
                args = pos_args,
                may_return_none = True,
                is_temp = node.is_temp)
        elif len(pos_args) == 2:
            return ExprNodes.PythonCapiCallNode(
                node.pos, "PyCallIter_New", self.PyCallIter_New_func_type,
                args = pos_args,
                is_temp = node.is_temp)
        else:
            self._error_wrong_arg_count('iter', node, pos_args, '1 or 2')
        return node

    Pyx_strlen_func_type = PyrexTypes.CFuncType(
        PyrexTypes.c_size_t_type, [
            PyrexTypes.CFuncTypeArg("bytes", PyrexTypes.c_char_ptr_type, None)
...
...
@@ -1916,7 +1856,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
                node.pos, "strlen", self.Pyx_strlen_func_type,
                args = [arg],
                is_temp = node.is_temp,
                utility_code = include_string_h_utility_code)
                utility_code = Builtin.include_string_h_utility_code)
        elif arg.type.is_pyobject:
            cfunc_name = self._map_to_capi_len_function(arg.type)
            if cfunc_name is None:
...
...
@@ -2125,23 +2065,6 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
    _handle_simple_method_list_pop = _handle_simple_method_object_pop

    PyList_Append_func_type = PyrexTypes.CFuncType(
        PyrexTypes.c_int_type, [
            PyrexTypes.CFuncTypeArg("list", PyrexTypes.py_object_type, None),
            PyrexTypes.CFuncTypeArg("item", PyrexTypes.py_object_type, None),
            ],
        exception_value = "-1")

    def _handle_simple_method_list_append(self, node, args, is_unbound_method):
        """Call PyList_Append() instead of l.append().
        """
        if len(args) != 2:
            self._error_wrong_arg_count('list.append', node, args, 2)
            return node
        return self._substitute_method_call(
            node, "PyList_Append", self.PyList_Append_func_type,
            'append', is_unbound_method, args)

    single_param_func_type = PyrexTypes.CFuncType(
        PyrexTypes.c_int_type, [
            PyrexTypes.CFuncTypeArg("obj", PyrexTypes.py_object_type, None),
...
...
@@ -2157,16 +2080,6 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
            node, "PyList_Sort", self.single_param_func_type,
            'sort', is_unbound_method, args)

    def _handle_simple_method_list_reverse(self, node, args, is_unbound_method):
        """Call PyList_Reverse() instead of l.reverse().
        """
        if len(args) != 1:
            self._error_wrong_arg_count('list.reverse', node, args, 1)
            return node
        return self._substitute_method_call(
            node, "PyList_Reverse", self.single_param_func_type,
            'reverse', is_unbound_method, args)

    Pyx_PyDict_GetItem_func_type = PyrexTypes.CFuncType(
        PyrexTypes.py_object_type, [
            PyrexTypes.CFuncTypeArg("dict", PyrexTypes.py_object_type, None),
...
...
@@ -2276,24 +2189,6 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
            node, "PyUnicode_Splitlines", self.PyUnicode_Splitlines_func_type,
            'splitlines', is_unbound_method, args)

    PyUnicode_Join_func_type = PyrexTypes.CFuncType(
        Builtin.unicode_type, [
            PyrexTypes.CFuncTypeArg("sep", Builtin.unicode_type, None),
            PyrexTypes.CFuncTypeArg("iterable", PyrexTypes.py_object_type, None),
            ])

    def _handle_simple_method_unicode_join(self, node, args, is_unbound_method):
        """Replace unicode.join(...) by a direct call to the
        corresponding C-API function.
        """
        if len(args) != 2:
            self._error_wrong_arg_count('unicode.join', node, args, 2)
            return node
        return self._substitute_method_call(
            node, "PyUnicode_Join", self.PyUnicode_Join_func_type,
            'join', is_unbound_method, args)

    PyUnicode_Split_func_type = PyrexTypes.CFuncType(
        Builtin.list_type, [
            PyrexTypes.CFuncTypeArg("str", Builtin.unicode_type, None),
...
...
@@ -2576,7 +2471,7 @@ class OptimizeBuiltinCalls(Visitor.EnvTransform):
                string_node.pos, "strlen", self.Pyx_strlen_func_type,
                args = [string_node],
                is_temp = False,
                utility_code = include_string_h_utility_code,
                utility_code = Builtin.include_string_h_utility_code,
                ).coerce_to(PyrexTypes.c_py_ssize_t_type, self.current_env())
        elif start:
            stop = ExprNodes.SubNode(
...
@@ -2956,13 +2851,6 @@ static CYTHON_INLINE char __Pyx_PyBytes_GetItemInt(PyObject* bytes, Py_ssize_t i
)

include_string_h_utility_code = UtilityCode(
proto = """
#include <string.h>
"""
)

tpnew_utility_code = UtilityCode(
proto = """
static CYTHON_INLINE PyObject* __Pyx_tp_new(PyObject* type_obj) {
...
@@ -2977,6 +2865,17 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
"""Calculate the result of constant expressions to store it in
``expr_node.constant_result``, and replace trivial cases by their
constant result.
General rules:
- We calculate float constants to make them available to the
compiler, but we do not aggregate them into a single literal
node to prevent any loss of precision.
- We recursively calculate constants from non-literal nodes to
make them available to the compiler, but we only aggregate
literal nodes at each step. Non-literal nodes are never merged
into a single node.
"""
    def _calculate_const(self, node):
        if node.constant_result is not ExprNodes.constant_value_not_set:
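As a side note on the two folding rules documented in the class docstring above, here is a minimal, purely illustrative sketch (not the compiler's data model): the result is always computed so it is known to later passes, but a merged literal is only produced when no float precision could be lost.

import operator

def fold(op, a, b):
    result = op(a, b)                    # always compute the constant value
    if isinstance(a, float) or isinstance(b, float):
        return None                      # keep the operand nodes, no merged literal
    return result                        # safe to replace the expression by a literal

assert fold(operator.add, 2, 3) == 5
assert fold(operator.add, 2.0, 3.0) is None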
...
...
@@ -3009,8 +2908,8 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
            import traceback, sys
            traceback.print_exc(file=sys.stdout)

    NODE_TYPE_ORDER = (ExprNodes.CharNode, ExprNodes.IntNode,
                       ExprNodes.LongNode, ExprNodes.FloatNode)
    NODE_TYPE_ORDER = [ExprNodes.CharNode, ExprNodes.IntNode,
                       ExprNodes.LongNode, ExprNodes.FloatNode]

    def _widest_node_class(self, *nodes):
        try:
...
@@ -3023,14 +2922,41 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
        self._calculate_const(node)
        return node

    def visit_UnaryMinusNode(self, node):
        self._calculate_const(node)
        if node.constant_result is ExprNodes.not_a_constant:
            return node
        if not node.operand.is_literal:
            return node
        if isinstance(node.operand, ExprNodes.LongNode):
            return ExprNodes.LongNode(node.pos, value = '-' + node.operand.value,
                                      constant_result = node.constant_result)
        if isinstance(node.operand, ExprNodes.FloatNode):
            # this is a safe operation
            return ExprNodes.FloatNode(node.pos, value = '-' + node.operand.value,
                                       constant_result = node.constant_result)
        node_type = node.operand.type
        if node_type.is_int and node_type.signed or \
               isinstance(node.operand, ExprNodes.IntNode) and node_type.is_pyobject:
            return ExprNodes.IntNode(node.pos, value = '-' + node.operand.value,
                                     type = node_type,
                                     longness = node.operand.longness,
                                     constant_result = node.constant_result)
        return node

    def visit_UnaryPlusNode(self, node):
        self._calculate_const(node)
        if node.constant_result is ExprNodes.not_a_constant:
            return node
        if node.constant_result == node.operand.constant_result:
            return node.operand
        return node

    def visit_BoolBinopNode(self, node):
        self._calculate_const(node)
        if node.constant_result is ExprNodes.not_a_constant:
            return node
        if not node.operand1.is_literal or not node.operand2.is_literal:
            # We calculate other constants to make them available to
            # the compiler, but we only aggregate constant nodes
            # recursively, so non-const nodes are straight out.
            return node
        if node.constant_result == node.operand1.constant_result and node.operand1.is_literal:
...
...
@@ -3046,14 +2972,8 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
        if node.constant_result is ExprNodes.not_a_constant:
            return node
        if isinstance(node.constant_result, float):
            # We calculate float constants to make them available to
            # the compiler, but we do not aggregate them into a
            # constant node to prevent any loss of precision.
            return node
        if not node.operand1.is_literal or not node.operand2.is_literal:
            # We calculate other constants to make them available to
            # the compiler, but we only aggregate constant nodes
            # recursively, so non-const nodes are straight out.
            return node

        # now inject a new constant node with the calculated value
...
...
@@ -3064,30 +2984,36 @@ class ConstantFolding(Visitor.VisitorTransform, SkipDeclarations):
        except AttributeError:
            return node

        if type1 is type2:
            new_node = node.operand1
        else:
            if type1.is_numeric and type2.is_numeric:
                widest_type = PyrexTypes.widest_numeric_type(type1, type2)
                if type(node.operand1) is type(node.operand2):
                    new_node = node.operand1
                    new_node.type = widest_type
                elif type1 is widest_type:
                    new_node = node.operand1
                elif type2 is widest_type:
                    new_node = node.operand2
                else:
                    widest_type = PyrexTypes.py_object_type
            target_class = self._widest_node_class(node.operand1, node.operand2)
            if target_class is None:
                return node
            elif target_class is ExprNodes.IntNode:
                unsigned = getattr(node.operand1, 'unsigned', '') and \
                           getattr(node.operand2, 'unsigned', '')
                longness = "LL"[:max(len(getattr(node.operand1, 'longness', '')),
                                     len(getattr(node.operand2, 'longness', '')))]
                new_node = ExprNodes.IntNode(pos=node.pos, unsigned=unsigned, longness=longness,
                                             value=str(node.constant_result),
                                             constant_result=node.constant_result)
                # IntNode is smart about the type it chooses, so we just
                # make sure we were not smarter this time
                if widest_type.is_pyobject or new_node.type.is_pyobject:
                    new_node.type = PyrexTypes.py_object_type
            else:
                target_class = self._widest_node_class(node.operand1, node.operand2)
                if target_class is None:
                    return node
                new_node = target_class(pos=node.pos, type=widest_type)
                new_node.constant_result = node.constant_result
                if isinstance(node, ExprNodes.BoolNode):
                    new_node.value = node.constant_result
                    new_node.type = PyrexTypes.widest_numeric_type(widest_type, new_node.type)
                else:
                    new_node.value = str(node.constant_result)
                #new_node = new_node.coerce_to(node.type, self.current_scope)
                if isinstance(node, ExprNodes.BoolNode):
                    node_value = node.constant_result
                else:
                    node_value = str(node.constant_result)
                new_node = target_class(pos=node.pos, type=widest_type,
                                        value=node_value,
                                        constant_result=node.constant_result)
        return new_node

    def visit_PrimaryCmpNode(self, node):
...
...
Cython/Compiler/ParseTreeTransforms.py
View file @
4f208e13
from Cython.Compiler.Visitor import VisitorTransform, TreeVisitor
from Cython.Compiler.Visitor import CythonTransform, EnvTransform
from Cython.Compiler.Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform
from Cython.Compiler.ModuleNode import ModuleNode
from Cython.Compiler.Nodes import *
from Cython.Compiler.ExprNodes import *
...
...
@@ -133,7 +133,7 @@ class PostParseError(CompileError): pass
ERR_CDEF_INCLASS = 'Cannot assign default value to fields in cdef classes, structs or unions'
ERR_BUF_DEFAULTS = 'Invalid buffer defaults specification (see docs)'
ERR_INVALID_SPECIALATTR_TYPE = 'Special attributes must not have a type declared'

class PostParse(CythonTransform):
class PostParse(ScopeTrackingTransform):
    """
    Basic interpretation of the parse tree, as well as validity
    checking that can be done on a very basic level on the parse
...
...
@@ -168,9 +168,6 @@ class PostParse(CythonTransform):
if a more pure Abstract Syntax Tree is wanted.
"""
# Track our context.
    scope_type = None # can be either of 'module', 'function', 'class'

    def __init__(self, context):
        super(PostParse, self).__init__(context)
        self.specialattribute_handlers = {
...
...
@@ -178,28 +175,8 @@ class PostParse(CythonTransform):
        }

    def visit_ModuleNode(self, node):
        self.scope_type = 'module'
        self.scope_node = node
        self.lambda_counter = 1
        self.visitchildren(node)
        return node

    def visit_scope(self, node, scope_type):
        prev = self.scope_type, self.scope_node
        self.scope_type = scope_type
        self.scope_node = node
        self.visitchildren(node)
        self.scope_type, self.scope_node = prev
        return node

    def visit_ClassDefNode(self, node):
        return self.visit_scope(node, 'class')

    def visit_FuncDefNode(self, node):
        return self.visit_scope(node, 'function')

    def visit_CStructOrUnionDefNode(self, node):
        return self.visit_scope(node, 'struct')
        return super(PostParse, self).visit_ModuleNode(node)

    def visit_LambdaNode(self, node):
        # unpack a lambda expression into the corresponding DefNode
...
...
@@ -242,7 +219,7 @@ class PostParse(CythonTransform):
            declbase = declbase.base
        if isinstance(declbase, CNameDeclaratorNode):
            if declbase.default is not None:
                if self.scope_type in ('class', 'struct'):
                if self.scope_type in ('cclass', 'pyclass', 'struct'):
                    if isinstance(self.scope_node, CClassDefNode):
                        handler = self.specialattribute_handlers.get(decl.name)
                        if handler:
...
@@ -1197,7 +1174,60 @@ class AnalyseExpressionsTransform(CythonTransform):
            node.analyse_scoped_expressions(node.expr_scope)
        self.visitchildren(node)
        return node

class ExpandInplaceOperators(EnvTransform):

    def visit_InPlaceAssignmentNode(self, node):
        lhs = node.lhs
        rhs = node.rhs
        if lhs.type.is_cpp_class:
            # No getting around this exact operator here.
            return node
        if isinstance(lhs, IndexNode) and lhs.is_buffer_access:
            # There is code to handle this case.
            return node

        def side_effect_free_reference(node, setting=False):
            if isinstance(node, NameNode):
                return node, []
            elif node.type.is_pyobject and not setting:
                node = LetRefNode(node)
                return node, [node]
            elif isinstance(node, IndexNode):
                if node.is_buffer_access:
                    raise ValueError, "Buffer access"
                base, temps = side_effect_free_reference(node.base)
                index = LetRefNode(node.index)
                return IndexNode(node.pos, base=base, index=index), temps + [index]
            elif isinstance(node, AttributeNode):
                obj, temps = side_effect_free_reference(node.obj)
                return AttributeNode(node.pos, obj=obj, attribute=node.attribute), temps
            else:
                node = LetRefNode(node)
                return node, [node]

        try:
            lhs, let_ref_nodes = side_effect_free_reference(lhs, setting=True)
        except ValueError:
            return node
        dup = lhs.__class__(**lhs.__dict__)
        binop = binop_node(node.pos,
                           operator = node.operator,
                           operand1 = dup,
                           operand2 = rhs,
                           inplace = True)
        node = SingleAssignmentNode(node.pos, lhs=lhs, rhs=binop)
        # Use LetRefNode to avoid side effects.
        let_ref_nodes.reverse()
        for t in let_ref_nodes:
            node = LetNode(t, node)
        node.analyse_expressions(self.current_env())
        return node

    def visit_ExprNode(self, node):
        # In-place assignments can't happen within an expression.
        return node
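To give a feel for what this transform buys, here is a hedged, pure-Python picture of the expansion (names are illustrative, not compiler internals): the indexed reference is evaluated once and bound to a temporary, and the in-place statement becomes an ordinary assignment whose right-hand side uses the in-place variant of the operator.

def expand_inplace_add(obj, compute_key, rhs):
    key = compute_key()        # the index expression runs exactly once (LetRefNode)
    value = obj[key]
    value += rhs               # in-place variant of the operator
    obj[key] = value           # plain assignment back through the same reference

d = {'a': 1}
expand_inplace_add(d, lambda: 'a', 10)
assert d['a'] == 11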
class AlignFunctionDefinitions(CythonTransform):
    """
    This class takes the signatures from a .pxd file and applies them to
...
...
@@ -1236,15 +1266,11 @@ class AlignFunctionDefinitions(CythonTransform):
    def visit_DefNode(self, node):
        pxd_def = self.scope.lookup(node.name)
        if pxd_def:
            if self.scope.is_c_class_scope and len(pxd_def.type.args) > 0:
                # The self parameter type needs adjusting.
                pxd_def.type.args[0].type = self.scope.parent_type
            if pxd_def.is_cfunction:
                node = node.as_cfunction(pxd_def)
            else:
            if not pxd_def.is_cfunction:
                error(node.pos, "'%s' redeclared" % node.name)
                error(pxd_def.pos, "previous declaration here")
                return None
            node = node.as_cfunction(pxd_def)
        elif self.scope.is_module_scope and self.directives['auto_cpdef']:
            node = node.as_cfunction(scope=self.scope)
        # Enable this when internal def functions are allowed.
...
...
Cython/Compiler/Parsing.pxd
View file @
4f208e13
...
...
@@ -35,6 +35,8 @@ cpdef p_yield_statement(PyrexScanner s)
cpdef p_power(PyrexScanner s)
cpdef p_new_expr(PyrexScanner s)
cpdef p_trailer(PyrexScanner s, node1)
cpdef p_call_parse_args(PyrexScanner s, bint allow_genexp = *)
cpdef p_call_build_packed_args(pos, positional_args, keyword_args, star_arg)
cpdef p_call(PyrexScanner s, function)
cpdef p_index(PyrexScanner s, base)
cpdef p_subscript_list(PyrexScanner s)
...
Cython/Compiler/Parsing.py
View file @
4f208e13
...
...
@@ -381,7 +381,7 @@ def p_trailer(s, node1):
# arglist: argument (',' argument)* [',']
# argument: [test '='] test # Really [keyword '='] test
def p_call(s, function):
def p_call_parse_args(s, allow_genexp = True):
    # s.sy == '('
    pos = s.position()
    s.next()
...
...
@@ -428,29 +428,43 @@ def p_call(s, function):
    if s.sy == ',':
        s.next()
    s.expect(')')
    return positional_args, keyword_args, star_arg, starstar_arg

def p_call_build_packed_args(pos, positional_args, keyword_args, star_arg):
    arg_tuple = None
    keyword_dict = None
    if positional_args or not star_arg:
        arg_tuple = ExprNodes.TupleNode(pos, args = positional_args)
    if star_arg:
        star_arg_tuple = ExprNodes.AsTupleNode(pos, arg = star_arg)
        if arg_tuple:
            arg_tuple = ExprNodes.binop_node(
                pos, operator = '+', operand1 = arg_tuple, operand2 = star_arg_tuple)
        else:
            arg_tuple = star_arg_tuple
    if keyword_args:
        keyword_args = [ExprNodes.DictItemNode(pos=key.pos, key=key, value=value)
                        for key, value in keyword_args]
        keyword_dict = ExprNodes.DictNode(pos, key_value_pairs = keyword_args)
    return arg_tuple, keyword_dict
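At runtime, the GeneralCallNode built from these packed arguments behaves like the small helper below; this is an illustrative plain-Python analogue, not compiler code, showing how the positional tuple, a *args tuple and the keyword dict combine.

def general_call(func, positional, star_arg, keywords):
    # positional tuple + unpacked star argument, keywords packed into a dict
    args = tuple(positional) + tuple(star_arg or ())
    kwargs = dict(keywords or {})
    return func(*args, **kwargs)

assert general_call(pow, [2], (10,), None) == 1024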
def p_call(s, function):
    # s.sy == '('
    pos = s.position()
    positional_args, keyword_args, star_arg, starstar_arg = \
        p_call_parse_args(s)

    if not (keyword_args or star_arg or starstar_arg):
        return ExprNodes.SimpleCallNode(pos,
            function = function,
            args = positional_args)
    else:
        arg_tuple = None
        keyword_dict = None
        if positional_args or not star_arg:
            arg_tuple = ExprNodes.TupleNode(pos, args = positional_args)
        if star_arg:
            star_arg_tuple = ExprNodes.AsTupleNode(pos, arg = star_arg)
            if arg_tuple:
                arg_tuple = ExprNodes.binop_node(
                    pos, operator = '+', operand1 = arg_tuple, operand2 = star_arg_tuple)
            else:
                arg_tuple = star_arg_tuple
        if keyword_args:
            keyword_args = [ExprNodes.DictItemNode(pos=key.pos, key=key, value=value)
                            for key, value in keyword_args]
            keyword_dict = ExprNodes.DictNode(pos, key_value_pairs = keyword_args)
        arg_tuple, keyword_dict = p_call_build_packed_args(
            pos, positional_args, keyword_args, star_arg)
        return ExprNodes.GeneralCallNode(pos,
            function = function,
            positional_args = arg_tuple,
...
...
@@ -2607,16 +2621,23 @@ def p_class_statement(s, decorators):
    s.next()
    class_name = EncodedString(p_ident(s))
    class_name.encoding = s.source_encoding
    arg_tuple = None
    keyword_dict = None
    starstar_arg = None
    if s.sy == '(':
        s.next()
        base_list = p_simple_expr_list(s)
        s.expect(')')
    else:
        base_list = []
        positional_args, keyword_args, star_arg, starstar_arg = \
            p_call_parse_args(s, allow_genexp = False)
        arg_tuple, keyword_dict = p_call_build_packed_args(
            pos, positional_args, keyword_args, star_arg)
    if arg_tuple is None:
        # XXX: empty arg_tuple
        arg_tuple = ExprNodes.TupleNode(pos, args = [])
    doc, body = p_suite(s, Ctx(level = 'class'), with_doc = 1)
    return Nodes.PyClassDefNode(pos,
        name = class_name,
        bases = ExprNodes.TupleNode(pos, args = base_list),
        bases = arg_tuple,
        keyword_args = keyword_dict,
        starstar_arg = starstar_arg,
        doc = doc, body = body, decorators = decorators)

def p_c_class_definition(s, pos, ctx):
...
...
Cython/Compiler/PyrexTypes.py
View file @
4f208e13
...
...
@@ -379,14 +379,8 @@ class BuiltinObjectType(PyObjectType):
    base_type = None
    module_name = '__builtin__'

    alternative_name = None # used for str/bytes duality

    def __init__(self, name, cname):
        self.name = name
        if name == 'str':
            self.alternative_name = 'bytes'
        elif name == 'bytes':
            self.alternative_name = 'str'
        self.cname = cname
        self.typeptr_cname = "&" + cname
...
...
@@ -403,9 +397,7 @@ class BuiltinObjectType(PyObjectType):
    def assignable_from(self, src_type):
        if isinstance(src_type, BuiltinObjectType):
            return src_type.name == self.name or (
                src_type.name == self.alternative_name and
                src_type.name is not None)
            return src_type.name == self.name
        elif src_type.is_extension_type:
            return (src_type.module_name == '__builtin__' and
                    src_type.name == self.name)
...
...
Cython/Compiler/Scanning.py
View file @
4f208e13
...
...
@@ -358,8 +358,11 @@ class PyrexScanner(Scanner):
            self.error("Unrecognized character")
        if sy == IDENT:
            if systring in self.keywords:
                if systring == 'print' and \
                       print_function in self.context.future_directives:
                if systring == 'print' and print_function in self.context.future_directives:
                    self.keywords.remove('print')
                    systring = EncodedString(systring)
                elif systring == 'exec' and self.context.language_level >= 3:
                    self.keywords.remove('exec')
                    systring = EncodedString(systring)
                else:
                    sy = systring
...
@@ -416,7 +419,11 @@ class PyrexScanner(Scanner):
if
message
:
self
.
error
(
message
)
else
:
self
.
error
(
"Expected '%s'"
%
what
)
if
self
.
sy
==
IDENT
:
found
=
self
.
systring
else
:
found
=
self
.
sy
self
.
error
(
"Expected '%s', found '%s'"
%
(
what
,
found
))
def
expect_indent
(
self
):
self
.
expect
(
'INDENT'
,
...
...
Cython/Compiler/Symtab.py
View file @
4f208e13
...
...
@@ -70,7 +70,7 @@ class Entry(object):
# or class attribute during
# class construction
# is_member boolean Is an assigned class member
# is_real_dict     boolean    Is a real dict, PyClass attributes dict
# is_pyclass_attr  boolean    Is a name in a Python class namespace
# is_variable boolean Is a variable
# is_cfunction boolean Is a C function
# is_cmethod boolean Is a C method of an extension type
...
...
@@ -132,7 +132,7 @@ class Entry(object):
is_cglobal = 0
is_pyglobal = 0
is_member = 0
is_real_dict = 0
is_pyclass_attr = 0
is_variable = 0
is_cfunction = 0
is_cmethod = 0
...
...
@@ -539,7 +539,7 @@ class Scope(object):
def declare_cfunction(self, name, type, pos,
cname = None, visibility = 'private', defining = 0,
api = 0, in_pxd = 0, modifiers = ()):
api = 0, in_pxd = 0, modifiers = (), utility_code = None):
# Add an entry for a C function.
if not cname:
if api or visibility != 'private':
...
...
@@ -552,7 +552,18 @@ class Scope(object):
warning(pos, "Function '%s' previously declared as '%s'" % (name, entry.visibility), 1)
if not entry.type.same_as(type):
if visibility == 'extern' and entry.visibility == 'extern':
can_override = False
if self.is_cpp():
can_override = True
elif cname:
# if all alternatives have different cnames,
# it's safe to allow signature overrides
for alt_entry in entry.all_alternatives():
if not alt_entry.cname or cname == alt_entry.cname:
break # cname not unique!
else:
can_override = True
if can_override:
temp = self.add_cfunction(name, type, pos, cname, visibility, modifiers)
temp.overloaded_alternatives = entry.all_alternatives()
entry = temp
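The uniqueness test in the lines above can be read in isolation as the small predicate below; this is an illustrative restatement, not code from the commit: an extern signature override is only accepted when no existing alternative shares the new cname (C++ is always allowed to overload).

def can_override_extern(new_cname, alternative_cnames, is_cpp):
    if is_cpp:
        return True
    if not new_cname:
        return False
    # every previously declared alternative must have a distinct, non-empty cname
    return all(c and c != new_cname for c in alternative_cnames)

assert can_override_extern("f2", ["f1"], is_cpp=False)
assert not can_override_extern("f1", ["f1"], is_cpp=False)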
...
...
@@ -574,6 +585,7 @@ class Scope(object):
entry.is_implemented = True
if modifiers:
entry.func_modifiers = modifiers
entry.utility_code = utility_code
return entry
def add_cfunction(self, name, type, pos, cname, visibility, modifiers):
...
...
@@ -711,8 +723,8 @@ class BuiltinScope(Scope):
# If python_equiv == "*", the Python equivalent has the same name
# as the entry, otherwise it has the name specified by python_equiv.
name = EncodedString(name)
entry = self.declare_cfunction(name, type, None, cname, visibility='extern')
entry.utility_code = utility_code
entry = self.declare_cfunction(name, type, None, cname, visibility='extern',
                               utility_code = utility_code)
if python_equiv:
if python_equiv == "*":
python_equiv = name
...
...
@@ -1352,7 +1364,7 @@ class StructOrUnionScope(Scope):
def declare_cfunction(self, name, type, pos,
cname = None, visibility = 'private', defining = 0,
api = 0, in_pxd = 0, modifiers = ()):
api = 0, in_pxd = 0, modifiers = ()):  # currently no utility code ...
return self.declare_var(name, type, pos, cname, visibility)
class ClassScope(Scope):
...
...
@@ -1407,7 +1419,7 @@ class PyClassScope(ClassScope):
        entry = Scope.declare_var(self, name, type, pos, cname, visibility, is_cdef)
        entry.is_pyglobal = 1
        entry.is_real_dict = 1
        entry.is_pyclass_attr = 1
        return entry

    def add_default_value(self, type):
...
@@ -1528,7 +1540,8 @@ class CClassScope(ClassScope):
def
declare_cfunction
(
self
,
name
,
type
,
pos
,
cname
=
None
,
visibility
=
'private'
,
defining
=
0
,
api
=
0
,
in_pxd
=
0
,
modifiers
=
()):
defining
=
0
,
api
=
0
,
in_pxd
=
0
,
modifiers
=
(),
utility_code
=
None
):
if
get_special_method_signature
(
name
):
error
(
pos
,
"Special methods must be declared with 'def', not 'cdef'"
)
args
=
type
.
args
...
...
@@ -1562,6 +1575,7 @@ class CClassScope(ClassScope):
                                         visibility, modifiers)
            if defining:
                entry.func_cname = self.mangle(Naming.func_prefix, name)
        entry.utility_code = utility_code
        return entry

    def add_cfunction(self, name, type, pos, cname, visibility, modifiers):
...
...
@@ -1572,7 +1586,20 @@ class CClassScope(ClassScope):
        entry.is_cmethod = 1
        entry.prev_entry = prev_entry
        return entry

    def declare_builtin_cfunction(self, name, type, cname, utility_code = None):
        # overridden methods of builtin types still have their Python
        # equivalent that must be accessible to support bound methods
        name = EncodedString(name)
        entry = self.declare_cfunction(name, type, None, cname,
                                       visibility='extern', utility_code=utility_code)
        var_entry = Entry(name, name, py_object_type)
        var_entry.is_variable = 1
        var_entry.is_builtin = 1
        var_entry.utility_code = utility_code
        entry.as_variable = var_entry
        return entry

    def declare_property(self, name, doc, pos):
        entry = self.lookup_here(name)
        if entry is None:
...
...
@@ -1660,7 +1687,7 @@ class CppClassScope(Scope):
    def declare_cfunction(self, name, type, pos,
                          cname = None, visibility = 'extern', defining = 0,
                          api = 0, in_pxd = 0, modifiers = ()):
                          api = 0, in_pxd = 0, modifiers = (), utility_code = None):
        if name == self.name.split('::')[-1] and cname is None:
            self.check_base_default_constructor(pos)
            name = '<init>'
...
...
@@ -1669,6 +1696,8 @@ class CppClassScope(Scope):
            entry = self.declare_var(name, type, pos, cname, visibility)
        if prev_entry:
            entry.overloaded_alternatives = prev_entry.all_alternatives()
        entry.utility_code = utility_code
        return entry

    def declare_inherited_cpp_attributes(self, base_scope):
        # Declare entries for all the C++ attributes of an
...
...
@@ -1689,7 +1718,8 @@ class CppClassScope(Scope):
        for base_entry in base_scope.cfunc_entries:
            entry = self.declare_cfunction(base_entry.name, base_entry.type,
                                           base_entry.pos, base_entry.cname,
                                           base_entry.visibility, base_entry.func_modifiers)
                                           base_entry.visibility, base_entry.func_modifiers,
                                           utility_code = base_entry.utility_code)
            entry.is_inherited = 1

    def specialize(self, values):
...
...
@@ -1710,7 +1740,8 @@ class CppClassScope(Scope):
            scope.declare_cfunction(e.name,
                                    e.type.specialize(values),
                                    e.pos,
                                    e.cname)
                                    e.cname, utility_code = e.utility_code)
        return scope

    def add_include_file(self, filename):
...
Cython/Compiler/TypeSlots.py
View file @
4f208e13
...
...
@@ -64,6 +64,7 @@ class Signature(object):
    error_value_map = {
        'O': "NULL",
        'T': "NULL",
        'i': "-1",
        'b': "-1",
        'l': "-1",
...
...
@@ -91,6 +92,10 @@ class Signature(object):
        # argument is 'self' for methods or 'class' for classmethods
        return self.fixed_arg_format[i] == 'T'

    def returns_self_type(self):
        # return type is same as 'self' argument type
        return self.ret_format == 'T'

    def fixed_arg_type(self, i):
        return self.format_map[self.fixed_arg_format[i]]
...
...
@@ -100,13 +105,20 @@ class Signature(object):
    def exception_value(self):
        return self.error_value_map.get(self.ret_format)

    def function_type(self):
    def function_type(self, self_arg_override=None):
        # Construct a C function type descriptor for this signature
        args = []
        for i in xrange(self.num_fixed_args()):
            arg_type = self.fixed_arg_type(i)
            args.append(PyrexTypes.CFuncTypeArg("", arg_type, None))
        ret_type = self.return_type()
            if self_arg_override is not None and self.is_self_arg(i):
                assert isinstance(self_arg_override, PyrexTypes.CFuncTypeArg)
                args.append(self_arg_override)
            else:
                arg_type = self.fixed_arg_type(i)
                args.append(PyrexTypes.CFuncTypeArg("", arg_type, None))
        if self_arg_override is not None and self.returns_self_type():
            ret_type = self_arg_override.type
        else:
            ret_type = self.return_type()
        exc_value = self.exception_value()
        return PyrexTypes.CFuncType(ret_type, args, exception_value=exc_value)
...
...
Cython/Compiler/UtilNodes.py
View file @
4f208e13
...
...
@@ -8,6 +8,7 @@ import Nodes
import ExprNodes
from Nodes import Node
from ExprNodes import AtomicExprNode
from PyrexTypes import c_ptr_type

class TempHandle(object):
    # THIS IS DEPRECATED, USE LetRefNode instead
...
...
@@ -196,6 +197,8 @@ class LetNodeMixin:
    def setup_temp_expr(self, code):
        self.temp_expression.generate_evaluation_code(code)
        self.temp_type = self.temp_expression.type
        if self.temp_type.is_array:
            self.temp_type = c_ptr_type(self.temp_type.base_type)
        self._result_in_temp = self.temp_expression.result_in_temp()
        if self._result_in_temp:
            self.temp = self.temp_expression.result()
...
...
Cython/Compiler/Visitor.pxd
View file @
4f208e13
...
...
@@ -3,14 +3,15 @@ cimport cython
cdef class BasicVisitor:
    cdef dict dispatch_table
    cpdef visit(self, obj)
    cpdef find_handler(self, obj)
    cdef _visit(self, obj)
    cdef find_handler(self, obj)

cdef class TreeVisitor(BasicVisitor):
    cdef public list access_path
    cpdef visitchild(self, child, parent, attrname, idx)
    cdef _visitchild(self, child, parent, attrname, idx)
    @cython.locals(idx=int)
    cpdef dict _visitchildren(self, parent, attrs)
    # cpdef visitchildren(self, parent, attrs=*)
    cdef dict _visitchildren(self, parent, attrs)
    cpdef visitchildren(self, parent, attrs=*)

cdef class VisitorTransform(TreeVisitor):
    cpdef visitchildren(self, parent, attrs=*)
...
...
@@ -19,3 +20,15 @@ cdef class VisitorTransform(TreeVisitor):
cdef class CythonTransform(VisitorTransform):
    cdef public context
    cdef public current_directives

cdef class ScopeTrackingTransform(CythonTransform):
    cdef public scope_type
    cdef public scope_node
    cdef visit_scope(self, node, scope_type)

cdef class EnvTransform(CythonTransform):
    cdef public list env_stack

cdef class RecursiveNodeReplacer(VisitorTransform):
    cdef public orig_node
    cdef public new_node
Cython/Compiler/Visitor.py
View file @
4f208e13
...
...
@@ -20,6 +20,9 @@ class BasicVisitor(object):
        self.dispatch_table = {}

    def visit(self, obj):
        return self._visit(obj)

    def _visit(self, obj):
        try:
            handler_method = self.dispatch_table[type(obj)]
        except KeyError:
...
...
@@ -173,10 +176,10 @@ class TreeVisitor(BasicVisitor):
                last_node.pos, self.__class__.__name__,
                u'\n'.join(trace), e, stacktrace)

    def visitchild(self, child, parent, attrname, idx):
    def _visitchild(self, child, parent, attrname, idx):
        self.access_path.append((parent, attrname, idx))
        try:
            result = self.visit(child)
            result = self._visit(child)
        except Errors.CompileError:
            raise
        except Exception, e:
...
...
@@ -206,9 +209,9 @@ class TreeVisitor(BasicVisitor):
            child = getattr(parent, attr)
            if child is not None:
                if type(child) is list:
                    childretval = [self.visitchild(x, parent, attr, idx) for idx, x in enumerate(child)]
                    childretval = [self._visitchild(x, parent, attr, idx) for idx, x in enumerate(child)]
                else:
                    childretval = self.visitchild(child, parent, attr, None)
                    childretval = self._visitchild(child, parent, attr, None)
                    assert not isinstance(childretval, list), 'Cannot insert list here: %s in %r' % (attr, parent)
                result[attr] = childretval
        return result
...
...
@@ -256,7 +259,7 @@ class VisitorTransform(TreeVisitor):
        return node

    def __call__(self, root):
        return self.visit(root)
        return self._visit(root)

class CythonTransform(VisitorTransform):
    """
...
...
@@ -288,8 +291,8 @@ class CythonTransform(VisitorTransform):
class ScopeTrackingTransform(CythonTransform):
    # Keeps track of type of scopes
    scope_type = None # can be either of 'module', 'function', 'cclass', 'pyclass'
    scope_node = None
    #scope_type: can be either of 'module', 'function', 'cclass', 'pyclass', 'struct'
    #scope_node: the node that owns the current scope

    def visit_ModuleNode(self, node):
        self.scope_type = 'module'
...
...
@@ -388,7 +391,7 @@ class PrintTree(TreeVisitor):
    def __call__(self, tree, phase=None):
        print("Parse tree dump at phase '%s'" % phase)
        self.visit(tree)
        self._visit(tree)
        return tree

    # Don't do anything about process_list, the defaults gives
...
...
Cython/Plex/Scanners.pxd
View file @
4f208e13
...
...
@@ -5,7 +5,7 @@ cdef class Scanner:
    cdef public lexicon
    cdef public stream
    cdef public name
    cdef public buffer
    cdef public unicode buffer
    cdef public Py_ssize_t buf_start_pos
    cdef public Py_ssize_t next_pos
    cdef public Py_ssize_t cur_pos
...
...
@@ -26,16 +26,15 @@ cdef class Scanner:
    @cython.locals(input_state=long)
    cpdef next_char(self)
    @cython.locals(queue=list)
    cpdef tuple read(self)
    cpdef tuple scan_a_token(self)
    cdef tuple scan_a_token(self)
    cpdef tuple position(self)

    @cython.locals(cur_pos=long, cur_line=long, cur_line_start=long,
                   input_state=long, next_pos=long,
                   input_state=long, next_pos=long, state=dict,
                   buf_start_pos=long, buf_len=long, buf_index=long,
                   trace=bint, discard=long)
    cpdef run_machine_inlined(self)
                   trace=bint, discard=long, data=unicode, buffer=unicode)
    cdef run_machine_inlined(self)

    cpdef begin(self, state)
    cpdef produce(self, value, text=*)
Cython/Plex/Scanners.py
View file @
4f208e13
...
...
@@ -163,7 +163,8 @@ class Scanner(object):
        buffer = self.buffer
        buf_start_pos = self.buf_start_pos
        buf_len = len(buffer)
        backup_state = None
        b_action, b_cur_pos, b_cur_line, b_cur_line_start, b_cur_char, b_input_state, b_next_pos = \
            None, 0, 0, 0, u'', 0, 0
        trace = self.trace
        while 1:
            if trace: #TRACE#
...
...
@@ -173,8 +174,8 @@ class Scanner(object):
                #action = state.action #@slow
                action = state['action'] #@fast
                if action is not None:
                    backup_state = (action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos)
                    b_action, b_cur_pos, b_cur_line, b_cur_line_start, b_cur_char, b_input_state, b_next_pos = \
                        action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos
                # End inlined self.save_for_backup()
                c = cur_char
                #new_state = state.new_state(c) #@slow
...
...
@@ -234,9 +235,11 @@ class Scanner(object):
                if trace: #TRACE#
                    print("blocked") #TRACE#
                # Begin inlined: action = self.back_up()
                if backup_state is not None:
                    (action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos) = backup_state
                if b_action is not None:
                    (action, cur_pos, cur_line, cur_line_start, cur_char, input_state, next_pos) = \
                        (b_action, b_cur_pos, b_cur_line, b_cur_line_start, b_cur_char, b_input_state, b_next_pos)
                else:
                    action = None
                break # while 1
...
...
Cython/Shadow.py
View file @
4f208e13
...
...
@@ -18,6 +18,10 @@ def inline(f, *args, **kwds):
    assert len(args) == len(kwds) == 0
    return f

def compile(f):
    from Cython.Build.Inline import RuntimeCompiledFunction
    return RuntimeCompiledFunction(f)

# Special functions

def cdiv(a, b):
...
...
Cython/TestUtils.py
View file @
4f208e13
...
...
@@ -173,15 +173,23 @@ def unpack_source_tree(tree_file, dir=None):
        dir = tempfile.mkdtemp()
    header = []
    cur_file = None
    for line in open(tree_file).readlines():
    f = open(tree_file)
    lines = f.readlines()
    f.close()
    f = None
    for line in lines:
        if line[:5] == '#####':
            filename = line.strip().strip('#').strip().replace('/', os.path.sep)
            path = os.path.join(dir, filename)
            if not os.path.exists(os.path.dirname(path)):
                os.makedirs(os.path.dirname(path))
            if cur_file is not None:
                cur_file.close()
            cur_file = open(path, 'w')
        elif cur_file is not None:
            cur_file.write(line)
        else:
            header.append(line)
    if cur_file is not None:
        cur_file.close()
    return dir, ''.join(header)
runtests.py
View file @
4f208e13
...
...
@@ -316,18 +316,22 @@ class CythonCompileTestCase(unittest.TestCase):
        source_file = os.path.join(test_directory, module) + '.pyx'
        source_and_output = codecs.open(
            self.find_module_source_file(source_file), 'rU', 'ISO-8859-1')
        out = codecs.open(os.path.join(workdir, module + '.pyx'),
                          'w', 'ISO-8859-1')
        for line in source_and_output:
            last_line = line
            if line.startswith("_ERRORS"):
                out.close()
                out = ErrorWriter()
            else:
                out.write(line)
        try:
            out = codecs.open(os.path.join(workdir, module + '.pyx'),
                              'w', 'ISO-8859-1')
            for line in source_and_output:
                last_line = line
                if line.startswith("_ERRORS"):
                    out.close()
                    out = ErrorWriter()
                else:
                    out.write(line)
        finally:
            source_and_output.close()
        try:
            geterrors = out.geterrors
        except AttributeError:
            out.close()
            return []
        else:
            return geterrors()
...
...
@@ -660,7 +664,10 @@ def collect_doctests(path, module_prefix, suite, selectors):
        for f in filenames:
            if file_matches(f):
                if not f.endswith('.py'): continue
                filepath = os.path.join(dirpath, f)[:-len(".py")]
                filepath = os.path.join(dirpath, f)
                if os.path.getsize(filepath) == 0: continue
                if 'no doctest' in open(filepath).next(): continue
                filepath = filepath[:-len(".py")]
                modulename = module_prefix + filepath[len(path)+1:].replace(os.path.sep, '.')
                if not [1 for match in selectors if match(modulename)]:
                    continue
...
...
setup.py
View file @
4f208e13
...
...
@@ -101,8 +101,12 @@ def compile_cython_modules(profile=False):
                pyx_source_file = source_file + ".py"
            else:
                pyx_source_file = source_file + ".pyx"
            dep_files = []
            if os.path.exists(source_file + '.pxd'):
                dep_files.append(source_file + '.pxd')
            extensions.append(
                Extension(module, sources = [pyx_source_file])
                Extension(module, sources = [pyx_source_file],
                          depends = dep_files)
                )

        class build_ext(build_ext_orig):
...
...
@@ -154,9 +158,18 @@ def compile_cython_modules(profile=False):
                else:
                    pyx_source_file = source_file + ".pyx"
                c_source_file = source_file + ".c"
                if not os.path.exists(c_source_file) or \
                   Utils.file_newer_than(pyx_source_file,
                                         Utils.modification_time(c_source_file)):
                source_is_newer = False
                if not os.path.exists(c_source_file):
                    source_is_newer = True
                else:
                    c_last_modified = Utils.modification_time(c_source_file)
                    if Utils.file_newer_than(pyx_source_file, c_last_modified):
                        source_is_newer = True
                    else:
                        pxd_source_file = source_file + ".pxd"
                        if os.path.exists(pxd_source_file) and Utils.file_newer_than(pxd_source_file, c_last_modified):
                            source_is_newer = True
                if source_is_newer:
                    print("Compiling module %s ..." % module)
                    result = compile(pyx_source_file)
                    c_source_file = result.c_file
...
...
@@ -241,6 +254,7 @@ setup(
    scripts = scripts,
    packages=[
        'Cython',
        'Cython.Build',
        'Cython.Compiler',
        'Cython.Runtime',
        'Cython.Distutils',
...
...
tests/bugs.txt
View file @
4f208e13
...
...
@@ -16,8 +16,12 @@ with_statement_module_level_T536
function_as_method_T494
closure_inside_cdef_T554
ipow_crash_T562
pure_mode_cmethod_inheritance_T583
# CPython regression tests that don't current work:
pyregr.test_threadsignals
pyregr.test_module
# CPython regression tests that don't make sense
pyregr.test_gdb
tests/build/basic_cythonize.srctree
View file @
4f208e13
...
...
@@ -5,7 +5,7 @@ PYTHON -c "import a"
# TODO: Better interface...
from Cython.Compiler.Dependencies import cythonize
from Cython.Build.Dependencies import cythonize
from distutils.core import setup
...
...
tests/build/inline_distutils.srctree
View file @
4f208e13
...
...
@@ -5,7 +5,7 @@ PYTHON -c "import a"
# TODO: Better interface...
from Cython.Compiler.Dependencies import cythonize
from Cython.Build.Dependencies import cythonize
from distutils.core import setup
...
...
tests/compile/builtinfuncs.pyx
View file @
4f208e13
...
...
@@ -15,6 +15,7 @@ cdef int f() except -1:
    i = len(x)
    x = open(y, z)
    x = pow(y, z, w)
    x = pow(y, z)
    x = reload(y)
    x = repr(y)
    setattr(x, y, z)
...
...
tests/errors/cdef_syntax.pyx
View file @
4f208e13
...
...
@@ -6,5 +6,5 @@ cdef nogil class test: pass
_ERRORS
=
u"""
2: 5: Expected an identifier, found 'pass'
3: 9: Empty declarator
4:11: Expected ':'
4:11: Expected ':', found 'class'
"""
tests/errors/cpdef_syntax.pyx
View file @
4f208e13
...
...
@@ -5,5 +5,5 @@ cpdef nogil class test: pass
_ERRORS
=
u"""
2: 6: cdef blocks cannot be declared cpdef
3: 6: cdef blocks cannot be declared cpdef
3:12: Expected ':'
3:12: Expected ':', found 'class'
"""
tests/errors/e2_packedstruct_T290.pyx
View file @
4f208e13
...
...
@@ -2,5 +2,5 @@ cdef packed foo:
pass
_ERRORS
=
u"""
1:12: Expected 'struct'
1:12: Expected 'struct', found 'foo'
"""
tests/run/bound_builtin_methods_T589.pyx
0 → 100644
View file @
4f208e13
cimport cython

_set = set  # CPython may not define it (in Py2.3), but Cython does :)

def test_set_clear_bound():
    """
    >>> type(test_set_clear_bound()) is _set
    True
    >>> list(test_set_clear_bound())
    []
    """
    cdef set s1 = set([1])
    clear = s1.clear
    clear()
    return s1

text = u'ab jd sdflk as sa sadas asdas fsdf '
pipe_sep = u'|'

@cython.test_assert_path_exists(
    "//SimpleCallNode",
    "//SimpleCallNode//NameNode")
def test_unicode_join_bound(unicode sep, l):
    """
    >>> l = text.split()
    >>> len(l)
    8
    >>> print( pipe_sep.join(l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    >>> print( test_unicode_join_bound(pipe_sep, l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    """
    join = sep.join
    return join(l)
tests/run/builtin_next.pyx
0 → 100644
View file @
4f208e13
import sys

IS_PY3 = sys.version_info[0] >= 3

__doc__ = """
>>> it = iter([1,2,3])
>>> if not IS_PY3:
...     next = type(it).next
>>> next(it)
1
>>> next(it)
2
>>> next(it)
3
>>> next(it)
Traceback (most recent call last):
StopIteration
>>> next(it)
Traceback (most recent call last):
StopIteration
>>> if IS_PY3: next(it, 123)
... else: print(123)
123
"""

if IS_PY3:
    __doc__ += """
>>> next(123)
Traceback (most recent call last):
TypeError: int object is not an iterator
"""

def test_next_not_iterable(it):
    """
    >>> test_next_not_iterable(123)
    Traceback (most recent call last):
    TypeError: int object is not an iterator
    """
    return next(it)

def test_single_next(it):
    """
    >>> it = iter([1,2,3])
    >>> test_single_next(it)
    1
    >>> test_single_next(it)
    2
    >>> test_single_next(it)
    3
    >>> test_single_next(it)
    Traceback (most recent call last):
    StopIteration
    >>> test_single_next(it)
    Traceback (most recent call last):
    StopIteration
    """
    return next(it)

def test_default_next(it, default):
    """
    >>> it = iter([1,2,3])
    >>> test_default_next(it, 99)
    1
    >>> test_default_next(it, 99)
    2
    >>> test_default_next(it, 99)
    3
    >>> test_default_next(it, 99)
    99
    >>> test_default_next(it, 99)
    99
    """
    return next(it, default)

def test_next_override(it):
    """
    >>> it = iter([1,2,3])
    >>> test_next_override(it)
    1
    >>> test_next_override(it)
    1
    >>> test_next_override(it)
    1
    >>> test_next_override(it)
    1
    """
    def next(it):
        return 1
    return next(it)
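For reference, the two-argument form exercised in test_default_next matches this pure-Python fallback; the helper name is illustrative and is only a sketch of the builtin's documented behaviour.

def next_with_default(iterator, default):
    try:
        return iterator.__next__()   # on Python 2 this would be iterator.next()
    except StopIteration:
        return default

assert next_with_default(iter([]), 99) == 99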
tests/run/builtin_pow.pyx
0 → 100644
View file @
4f208e13
def pow3(a, b, c):
    """
    >>> pow3(2,3,5)
    3
    >>> pow3(3,3,5)
    2
    """
    return pow(a, b, c)

def pow3_const():
    """
    >>> pow3_const()
    3
    """
    return pow(2, 3, 5)

def pow2(a, b):
    """
    >>> pow2(2,3)
    8
    >>> pow2(3,3)
    27
    """
    return pow(a, b)

def pow2_const():
    """
    >>> pow2_const()
    8
    """
    return pow(2, 3)

def pow_args(*args):
    """
    >>> pow_args(2,3)
    8
    >>> pow_args(2,3,5)
    3
    """
    return pow(*args)
tests/run/consts.pyx
View file @
4f208e13
...
...
@@ -37,8 +37,8 @@ def long_int_mix():
"""
>>> long_int_mix() == 1 + (2 * 3) // 2
True
>>> if IS_PY3: type(long_int_mix()) is int
... else: type(long_int_mix()) is long
>>> if IS_PY3: type(long_int_mix()) is int or type(long_int_mix())
... else: type(long_int_mix()) is long or type(long_int_mix())
True
"""
return 1L + (2 * 3L) // 2
...
...
tests/run/cython3.pyx
View file @
4f208e13
# cython: language_level=3

cimport cython

try:
    sorted
except NameError:
...
...
@@ -15,6 +17,25 @@ def print_function(*args):
"""
print
(
*
args
)
# this isn't valid Py2 syntax
def
exec3_function
(
cmd
):
"""
>>> exec3_function('a = 1+1')['a']
2
"""
g
=
{}
l
=
{}
exec
(
cmd
,
g
,
l
)
return
l
def
exec2_function
(
cmd
):
"""
>>> exec2_function('a = 1+1')['a']
2
"""
g
=
{}
exec
(
cmd
,
g
)
return
g
ustring
=
"abcdefg"
def
unicode_literals
():
...
...
@@ -36,6 +57,13 @@ def list_comp():
    assert x == 'abc' # don't leak in Py3 code
    return result

def list_comp_unknown_type(l):
    """
    >>> list_comp_unknown_type(range(5))
    [0, 4, 8]
    """
    return [x*2 for x in l if x % 2 == 0]

def set_comp():
    """
    >>> sorted(set_comp())
...
...
@@ -55,3 +83,26 @@ def dict_comp():
    result = {x:x*2 for x in range(5) if x % 2 == 0}
    assert x == 'abc' # don't leak
    return result

# in Python 3, d.keys/values/items() are the iteration methods

@cython.test_assert_path_exists(
    "//WhileStatNode",
    "//WhileStatNode/SimpleCallNode",
    "//WhileStatNode/SimpleCallNode/NameNode")
@cython.test_fail_if_path_exists(
    "//ForInStatNode")
def dict_iter(dict d):
    """
    >>> d = {'a' : 1, 'b' : 2, 'c' : 3}
    >>> keys, values, items = dict_iter(d)
    >>> sorted(keys)
    ['a', 'b', 'c']
    >>> sorted(values)
    [1, 2, 3]
    >>> sorted(items)
    [('a', 1), ('b', 2), ('c', 3)]
    """
    keys = [key for key in d.keys()]
    values = [value for value in d.values()]
    items = [item for item in d.items()]
    return keys, values, items
tests/run/inplace.pyx
View file @
4f208e13
__doc__ = u"""
>>> str(f(5, 7))
'29509034655744'
"""

cimport cython

def f(a, b):
    """
    >>> str(f(5, 7))
    '29509034655744'
    """
    a += b
    a *= b
    a **= b
...
...
@@ -117,3 +117,130 @@ def test_side_effects():
    b[side_effect(3)] += 10
    b[c_side_effect(4)] += 100
    return a, [b[i] for i from 0 <= i < 5]

@cython.cdivision(True)
def test_inplace_cdivision(int a, int b):
    """
    >>> test_inplace_cdivision(13, 10)
    3
    >>> test_inplace_cdivision(13, -10)
    3
    >>> test_inplace_cdivision(-13, 10)
    -3
    >>> test_inplace_cdivision(-13, -10)
    -3
    """
    a %= b
    return a

@cython.cdivision(False)
def test_inplace_pydivision(int a, int b):
    """
    >>> test_inplace_pydivision(13, 10)
    3
    >>> test_inplace_pydivision(13, -10)
    -7
    >>> test_inplace_pydivision(-13, 10)
    7
    >>> test_inplace_pydivision(-13, -10)
    -3
    """
    a %= b
    return a

def test_complex_inplace(double complex x, double complex y):
    """
    >>> test_complex_inplace(1, 1)
    (2+0j)
    >>> test_complex_inplace(2, 3)
    (15+0j)
    >>> test_complex_inplace(2+3j, 4+5j)
    (-16+62j)
    """
    x += y
    x *= y
    return x

# The following is more subtle than one might expect.

cdef struct Inner:
    int x

cdef struct Aa:
    int value
    Inner inner

cdef struct NestedA:
    Aa a

cdef struct ArrayOfA:
    Aa[10] a

def nested_struct_assignment():
    """
    >>> nested_struct_assignment()
    """
    cdef NestedA nested
    nested.a.value = 2
    nested.a.value += 3
    assert nested.a.value == 5

    nested.a.inner.x = 5
    nested.a.inner.x += 10
    assert nested.a.inner.x == 15

def nested_array_assignment():
    """
    >>> nested_array_assignment()
    c side effect 0
    c side effect 1
    """
    cdef ArrayOfA array
    array.a[0].value = 2
    array.a[c_side_effect(0)].value += 3
    assert array.a[0].value == 5

    array.a[1].inner.x = 5
    array.a[c_side_effect(1)].inner.x += 10
    assert array.a[1].inner.x == 15

cdef class VerboseDict(object):
    cdef name
    cdef dict dict
    def __init__(self, name, **kwds):
        self.name = name
        self.dict = kwds
    def __getitem__(self, key):
        print self.name, "__getitem__", key
        return self.dict[key]
    def __setitem__(self, key, value):
        print self.name, "__setitem__", key, value
        self.dict[key] = value
    def __repr__(self):
        return repr(self.name)

def deref_and_increment(o, key):
    """
    >>> deref_and_increment({'a': 1}, 'a')
    side effect a
    >>> v = VerboseDict('v', a=10)
    >>> deref_and_increment(v, 'a')
    side effect a
    v __getitem__ a
    v __setitem__ a 11
    """
    o[side_effect(key)] += 1

def double_deref_and_increment(o, key1, key2):
    """
    >>> v = VerboseDict('v', a=10)
    >>> w = VerboseDict('w', vkey=v)
    >>> double_deref_and_increment(w, 'vkey', 'a')
    side effect vkey
    w __getitem__ vkey
    side effect a
    v __getitem__ a
    v __setitem__ a 11
    """
    o[side_effect(key1)][side_effect(key2)] += 1
tests/run/int_literals.pyx
View file @
4f208e13
...
...
@@ -3,11 +3,18 @@ __doc__ = u"""
(1, 1L, -1L, 18446744073709551615L)
>>> py_longs()
(1, 1L, 100000000000000000000000000000000L, -100000000000000000000000000000000L)
>>> py_huge_calculated_long()
1606938044258990275541962092341162602522202993782792835301376L
>>> py_huge_computation_small_result_neg()
(-2535301200456458802993406410752L, -2535301200456458802993406410752L)
"""
import sys

cimport cython
from cython cimport typeof

import sys

if sys.version_info[0] >= 3:
    __doc__ = __doc__.replace(u'L', u'')
...
...
@@ -27,6 +34,25 @@ def c_longs():
def py_longs():
    return 1, 1L, 100000000000000000000000000000000, -100000000000000000000000000000000

@cython.test_fail_if_path_exists("//NumBinopNode", "//IntBinopNode")
@cython.test_assert_path_exists("//ReturnStatNode/IntNode")
def py_huge_calculated_long():
    return 1 << 200

@cython.test_fail_if_path_exists("//NumBinopNode", "//IntBinopNode")
@cython.test_assert_path_exists("//ReturnStatNode/IntNode")
def py_huge_computation_small_result():
    """
    >>> py_huge_computation_small_result()
    2
    """
    return (1 << 200) >> 199

@cython.test_fail_if_path_exists("//NumBinopNode", "//IntBinopNode")
#@cython.test_assert_path_exists("//ReturnStatNode/IntNode")
def py_huge_computation_small_result_neg():
    return -(2**101), (-2)**101

def large_literal():
    """
    >>> type(large_literal()) is int
...
...
@@ -59,50 +85,67 @@ def c_long_types():
def
c_oct
():
"""
>>> c_oct()
(1, 17, 63)
(1,
-
17, 63)
"""
cdef
int
a
=
0o01
cdef
int
b
=
0o21
cdef
int
b
=
-
0o21
cdef
int
c
=
0o77
return
a
,
b
,
c
def
c_oct_py2_legacy
():
"""
>>> c_oct_py2_legacy()
(1, -17, 63)
"""
cdef
int
a
=
001
cdef
int
b
=
-
021
cdef
int
c
=
077
return
a
,
b
,
c
def
py_oct
():
"""
>>> py_oct()
(1, 17, 63)
(1, -17, 63)
"""
return
0o01
,
-
0o21
,
0o77
def
py_oct_py2_legacy
():
"""
>>> py_oct_py2_legacy()
(1, -17, 63)
"""
return
0
o01
,
0o21
,
0o
77
return
0
01
,
-
021
,
0
77
def
c_hex
():
"""
>>> c_hex()
(1, 33, 255)
(1,
-
33, 255)
"""
cdef
int
a
=
0x01
cdef
int
b
=
0x21
cdef
int
b
=
-
0x21
cdef
int
c
=
0xFF
return
a
,
b
,
c
def
py_hex
():
"""
>>> py_hex()
(1, 33, 255)
(1,
-
33, 255)
"""
return
0x01
,
0x21
,
0xFF
return
0x01
,
-
0x21
,
0xFF
def
c_bin
():
"""
>>> c_bin()
(1, 2, 15)
(1,
-
2, 15)
"""
cdef
int
a
=
0b01
cdef
int
b
=
0b10
cdef
int
b
=
-
0b10
cdef
int
c
=
0b1111
return
a
,
b
,
c
def
py_bin
():
"""
>>> py_bin()
(1, 2, 15)
(1,
-
2, 15)
"""
return
0b01
,
0b10
,
0b1111
return
0b01
,
-
0b10
,
0b1111
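The py_huge_* path assertions earlier in this file check that the big-integer arithmetic is folded away at compile time into a single IntNode per return statement. The values those doctests expect can be sanity-checked in plain Python (illustrative check only, not part of the test file):

    assert (1 << 200) == 1606938044258990275541962092341162602522202993782792835301376
    assert ((1 << 200) >> 199) == 2
    assert -(2**101) == (-2)**101 == -2535301200456458802993406410752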
tests/run/list.pyx
cimport cython

def f(obj1, obj2, obj3, obj4, obj5):
    """
    >>> f(1, 2, 3, 4, 5)
...
@@ -54,6 +57,7 @@ def test_list_sort_reversed():
    l1.sort(reversed=True)
    return l1

@cython.test_assert_path_exists("//SimpleCallNode//NoneCheckNode")
def test_list_reverse():
    """
    >>> test_list_reverse()
...
@@ -64,6 +68,17 @@ def test_list_reverse():
    l1.reverse()
    return l1

@cython.test_assert_path_exists("//SimpleCallNode//NoneCheckNode")
def test_list_append():
    """
    >>> test_list_append()
    [1, 2, 3, 4]
    """
    cdef list l1 = [1, 2]
    l1.append(3)
    l1.append(4)
    return l1

def test_list_pop():
    """
    >>> test_list_pop()
...
tests/run/metaclass.pyx
"""
>>> obj = Foo()
>>> obj.metaclass_was_here
True
"""
class
Base
(
type
):
def
__new__
(
cls
,
name
,
bases
,
attrs
):
attrs
[
'metaclass_was_here'
]
=
True
return
type
.
__new__
(
cls
,
name
,
bases
,
attrs
)
class
Foo
(
object
):
"""
>>> obj = Foo()
>>> obj.metaclass_was_here
True
"""
__metaclass__
=
Base
class
Py3Base
(
type
):
def
__new__
(
cls
,
name
,
bases
,
attrs
,
foo
=
None
):
attrs
[
'foo'
]
=
foo
return
type
.
__new__
(
cls
,
name
,
bases
,
attrs
)
def
__init__
(
self
,
cls
,
attrs
,
obj
,
foo
=
None
):
pass
@
staticmethod
def
__prepare__
(
name
,
bases
,
**
kwargs
):
return
{
'bar'
:
666
,
'dirty'
:
True
}
class
Py3Foo
(
object
,
metaclass
=
Py3Base
,
foo
=
123
):
"""
>>> obj = Py3Foo()
>>> obj.foo
123
>>> obj.bar
666
>>> obj.dirty
False
"""
dirty
=
False
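For reference, the Py3 class-creation protocol exercised above behaves the same way in plain Python 3; the following is an illustrative sketch with made-up names, not part of the test:

    class Meta(type):
        @staticmethod
        def __prepare__(name, bases, **kwargs):
            # Returns the initial namespace for the class body; class keywords
            # (e.g. foo=123) arrive here and in __new__/__init__ as **kwargs.
            return {'bar': 666}

        def __new__(cls, name, bases, attrs, foo=None):
            attrs['foo'] = foo
            return type.__new__(cls, name, bases, attrs)

        def __init__(self, name, bases, attrs, foo=None):
            pass

    class Demo(object, metaclass=Meta, foo=123):
        pass

    # Demo.bar == 666 (seeded by __prepare__), Demo.foo == 123 (class keyword)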
tests/run/pure_mode_cmethod_inheritance_T583.pxd
(new file)
cdef class Base:
    cpdef str noargs(self)
    cpdef str int_arg(self, int i)
    cpdef str _class(tp)

cdef class Derived(Base):
    cpdef str noargs(self)
    cpdef str int_arg(self, int i)
    cpdef str _class(tp)

cdef class DerivedDerived(Derived):
    cpdef str noargs(self)
    cpdef str int_arg(self, int i)
    cpdef str _class(tp)

cdef class Derived2(Base):
    cpdef str noargs(self)
    cpdef str int_arg(self, int i)
    cpdef str _class(tp)
tests/run/pure_mode_cmethod_inheritance_T583.py
(new file)
class Base(object):
    '''
    >>> base = Base()
    >>> print(base.noargs())
    Base
    >>> print(base.int_arg(1))
    Base
    >>> print(base._class())
    Base
    '''
    def noargs(self):
        return "Base"

    def int_arg(self, i):
        return "Base"

    @classmethod
    def _class(tp):
        return "Base"


class Derived(Base):
    '''
    >>> derived = Derived()
    >>> print(derived.noargs())
    Derived
    >>> print(derived.int_arg(1))
    Derived
    >>> print(derived._class())
    Derived
    '''
    def noargs(self):
        return "Derived"

    def int_arg(self, i):
        return "Derived"

    @classmethod
    def _class(tp):
        return "Derived"


class DerivedDerived(Derived):
    '''
    >>> derived = DerivedDerived()
    >>> print(derived.noargs())
    DerivedDerived
    >>> print(derived.int_arg(1))
    DerivedDerived
    >>> print(derived._class())
    DerivedDerived
    '''
    def noargs(self):
        return "DerivedDerived"

    def int_arg(self, i):
        return "DerivedDerived"

    @classmethod
    def _class(tp):
        return "DerivedDerived"


class Derived2(Base):
    '''
    >>> derived = Derived2()
    >>> print(derived.noargs())
    Derived2
    >>> print(derived.int_arg(1))
    Derived2
    >>> print(derived._class())
    Derived2
    '''
    def noargs(self):
        return "Derived2"

    def int_arg(self, i):
        return "Derived2"

    @classmethod
    def _class(tp):
        return "Derived2"
tests/run/set.pyx
...
@@ -56,6 +56,15 @@ def test_set_clear():
    s1.clear()
    return s1

def test_set_clear_None():
    """
    >>> test_set_clear_None()
    Traceback (most recent call last):
    AttributeError: 'NoneType' object has no attribute 'clear'
    """
    cdef set s1 = None
    s1.clear()

def test_set_list_comp():
    """
    >>> type(test_set_list_comp()) is _set
...
tests/run/type_inference.pyx
...
@@ -350,8 +350,7 @@ cdef object some_float_value():
@cython.test_fail_if_path_exists('//NameNode[@type.is_pyobject = True]')
@cython.test_assert_path_exists('//InPlaceAssignmentNode/NameNode',
                                '//NameNode[@type.is_pyobject]',
@cython.test_assert_path_exists('//NameNode[@type.is_pyobject]',
                                '//NameNode[@type.is_pyobject = False]')
@infer_types(None)
def double_loop():
...
tests/run/unicode_indexing.pyx
...
@@ -243,7 +243,6 @@ def index_add(unicode ustring, Py_ssize_t i, Py_ssize_t j):
@cython.test_assert_path_exists("//CoerceToPyTypeNode",
                                "//IndexNode",
                                "//InPlaceAssignmentNode",
                                "//CoerceToPyTypeNode//IndexNode")
@cython.test_fail_if_path_exists("//IndexNode//CoerceToPyTypeNode")
def index_concat_loop(unicode ustring):
...
tests/run/unicodemethods.pyx
...
@@ -180,9 +180,12 @@ pipe_sep = u'|'
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode",
                                 "//CoerceFromPyTypeNode",
                                 "//CastNode", "//TypecastNode")
                                 "//CastNode", "//TypecastNode",
                                 "//SimpleCallNode//AttributeNode[@is_py_attr = true]")
@cython.test_assert_path_exists("//PythonCapiCallNode")
                                "//SimpleCallNode",
                                "//SimpleCallNode//NoneCheckNode",
                                "//SimpleCallNode//AttributeNode[@is_py_attr = false]")
def join(unicode sep, l):
    """
    >>> l = text.split()
...
@@ -197,9 +200,11 @@ def join(unicode sep, l):
@cython.test_fail_if_path_exists("//CoerceToPyTypeNode",
                                 "//CoerceFromPyTypeNode",
                                 "//CastNode", "//TypecastNode", "//NoneCheckNode")
                                 "//CastNode", "//TypecastNode", "//NoneCheckNode",
                                 "//SimpleCallNode//AttributeNode[@is_py_attr = true]")
@cython.test_assert_path_exists("//PythonCapiCallNode")
                                "//SimpleCallNode",
                                "//SimpleCallNode//AttributeNode[@is_py_attr = false]")
def join_sep(l):
    """
    >>> l = text.split()
...
@@ -212,6 +217,22 @@ def join_sep(l):
    """
    return u'|'.join(l)

@cython.test_assert_path_exists("//SimpleCallNode",
                                "//SimpleCallNode//NameNode")
def join_unbound(unicode sep, l):
    """
    >>> l = text.split()
    >>> len(l)
    8
    >>> print( pipe_sep.join(l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    >>> print( join_unbound(pipe_sep, l) )
    ab|jd|sdflk|as|sa|sadas|asdas|fsdf
    """
    join = unicode.join
    return join(sep, l)

# unicode.startswith(s, prefix, [start, [end]])
...